diff --git a/.cursorrules b/.cursor/rules/bash_scripts.mdc similarity index 90% rename from .cursorrules rename to .cursor/rules/bash_scripts.mdc index 907e7759..7ac90ed6 100644 --- a/.cursorrules +++ b/.cursor/rules/bash_scripts.mdc @@ -1,3 +1,7 @@ +--- +description: Standards and Rules to follow when editing bash scripts in @bin/ +globs: bin/* +--- # Shell Script Documentation and Standards ## Shell Script Standards @@ -37,10 +41,11 @@ For all bash shell scripts, avoid using: ## Documentation Requirements -When editing or creating files in `/bin`: +When editing or creating files: - Every shell script must have comprehensive header documentation - Documentation must include: description, usage, parameters, examples - For complex logic, provide detailed examples of different use cases +- Use backticks (`) around command names, executables, and technical terms in documentation - Document all return values and exit codes - Keep documentation up to date with any changes @@ -83,7 +88,7 @@ When editing or creating files in `/bin`: - Use POSIX-compliant syntax and commands - Ensure the script is at least compatible with macOS and Linux -- Make your best effort to make the bash scripts also compatible with Windows if possible, in addition to macOS and Linux. If it's not possible, document that in the script's header documentation +- Make your best effort to make the bash scripts also compatible with Windows if possible, in addition to macOS and Linux. If that's not possible, document it in the script's header documentation; and if the script's functionality would still be useful on Windows agents, offer to help the author of the original script create a PowerShell counterpart that provides the same functionality when running on Windows agents. - If the script would behave differently on different platforms despite your best efforts to make it behave the same way cross-platform, document any such platform-specific behavior in the script header documentation ### For bash scripts compatible with macOS and Linux diff --git a/.cursor/rules/context.mdc b/.cursor/rules/context.mdc new file mode 100644 index 00000000..dafadf85 --- /dev/null +++ b/.cursor/rules/context.mdc @@ -0,0 +1,11 @@ +--- +description: Provide context to the agent about this repository and how its files are used +globs: +--- +# Context + +- You are a senior software engineer working at Automattic, and you are part of the Apps Infra team, responsible for our CI infrastructure, including providing tools and scripts to run on our CI agents. +- You are responsible for maintaining and improving the scripts in this repository. +- Our CI runs on Buildkite infrastructure, using a mix of self-hosted agents (MacMinis in our DataCenter) and Linux or Windows EC2 instances on AWS. +- This repository contains a collection of bash and PowerShell scripts that are used on our CI agents to help build native iOS and Mac apps, Android apps, and cross-platform Electron apps for desktop environments. +- Those scripts are made available on our CI agents via the Buildkite plugin system. The scripts in the `bin/` directory are available in the `$PATH` of all our CI jobs that use `a8c-ci-toolkit` in their pipeline steps' `plugins:` attribute.
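For illustration only (this sketch is not part of the changeset, and the script name and its commands are hypothetical), a `bin/` script header written to the standards above — including the new rule about wrapping command names and technical terms in backticks — could look like this:

```bash
#!/bin/bash

# Script: fetch_build_log (hypothetical example, not an existing bin/ script)
#
# Description:
#   Downloads a build log that a previous job stored with `upload_artifact`,
#   using `download_artifact`, and prints its size in bytes.
#
# Usage:
#   fetch_build_log <file_name>
#
# Returns:
#   0 - Log was downloaded successfully
#   1 - Missing file name argument
#
# Requirements:
#   This script needs to run on a CI agent which has the following external commands installed:
#   - `wc`
#   - `download_artifact` (provided by a8c-ci-toolkit)

set -euo pipefail

# Validate required argument
if [[ -z "${1:-}" ]]; then
  echo "Error: You must pass the name of the log file to download" >&2
  exit 1
fi

download_artifact "$1"
wc -c "$1"
```

The header mirrors the layout the scripts in this diff use (`Script`, `Description`, `Usage`, `Returns`, `Requirements`), with backticks around every command name.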
diff --git a/.cursor/rules/powershell_scripts.mdc b/.cursor/rules/powershell_scripts.mdc new file mode 100644 index 00000000..c2c10c44 --- /dev/null +++ b/.cursor/rules/powershell_scripts.mdc @@ -0,0 +1,141 @@ +--- +description: Standards and Rules to follow when editing PowerShell scripts in @bin/ +globs: *.ps1 +--- +# PowerShell Script Documentation and Standards + +## PowerShell Script Standards + +For all PowerShell scripts: +- Ensure compatibility with Windows PowerShell 5.1 (default on Windows Server) +- Follow `PSScriptAnalyzer` recommendations +- Use proper error handling and input validation +- Use UTF-8 encoding without BOM for script files +- Document Windows-specific requirements and dependencies + +## Best Practices + +For all PowerShell scripts: +- Use `$ErrorActionPreference = "Stop"` at the beginning of scripts +- Use `Set-StrictMode -Version Latest` for strict variable and function checking +- Always use full cmdlet names (avoid aliases like `ls`, use `Get-ChildItem` instead) +- Use proper verb-noun naming convention for functions +- Quote all variable expansions in strings +- Use `[CmdletBinding()]` for advanced functions +- Use parameter validation attributes +- Include proper error handling with try/catch blocks +- Use approved PowerShell verbs (Get-Verb) +- End script files with a newline +- Use proper PowerShell case conventions: + - PascalCase for functions, cmdlets, and parameters + - camelCase for variables + - UPPERCASE for constants +- Check for and require Administrator privileges when needed using: + +```powershell +if (-not ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltinRole]::Administrator)) { + throw "This script requires Administrator privileges" +} +``` + +## Documentation Requirements + +When editing or creating PowerShell scripts in `/bin`: +- Every script must have comprehensive comment-based help +- Documentation must include: synopsis, description, parameters, examples +- For complex logic, provide detailed examples of different use cases +- Use backticks (`) around command names, executables, and technical terms in documentation +- Document all return values and terminating conditions +- Keep documentation up to date with any changes +- Clearly document Windows version requirements or limitations +- Document required Windows features or roles + +## Documentation Template + +```powershell +<# +.SYNOPSIS + Brief description of what the script does. + +.DESCRIPTION + Detailed description of the script's purpose and functionality. + +.PARAMETER ParameterName + Description of each parameter. + +.EXAMPLE + Example-1 + Detailed description of the example. + +.NOTES + Author: [Author name] + Last Edit: [Date] + Version 1.0 - Initial release + Requirements: Windows PowerShell 5.1 + Windows Requirements: + - Windows Server 2019 or later + - Required Windows Features: [list features] + - Required Windows Roles: [list roles] + - Administrator privileges: [Yes/No] + +.OUTPUTS + Description of the script's output. + +.RETURNVALUES + Description of possible return values and their meanings. 
+#> +``` + +## Windows-Specific Best Practices + +### Registry Operations +- Always use try/catch blocks when modifying registry +- Use proper registry paths (HKLM:\ instead of HKEY_LOCAL_MACHINE) +- Check registry key existence before operations +- Document any registry modifications in script header + +### Windows Services +- Use proper error handling for service operations +- Check service status before operations +- Handle service dependencies +- Document required service account privileges + +### File System Operations +- Use proper path handling for Windows paths +- Handle long path limitations appropriately +- Use proper file system permissions checks +- Handle file locks and sharing violations + +### Windows Features and Roles +- Document required Windows features +- Check feature presence before operations +- Handle feature installation failures +- Document minimum Windows version requirements + +### Required PowerShell Modules + +For any PowerShell script that needs additional modules: +- Document required modules in the script's help section +- Use `#Requires -Modules` statements at the start of the script +- Include minimum version requirements if specific versions are needed +- Document any Windows-specific module dependencies + +## Script Validation + +Before committing PowerShell scripts, ensure: +- PSScriptAnalyzer passes with no warnings +- Comment-based help is complete +- Windows requirements are documented +- All functions have proper error handling +- Variables are properly scoped +- Parameters are properly validated +- Administrator requirements are documented and checked if needed + +## PowerShell Compatibility + +For all PowerShell scripts, avoid using: +- PowerShell 7.x exclusive features +- Deprecated cmdlets and parameters +- Write-Host (use Write-Output or Write-Information instead) +- Positional parameters (always use named parameters) +- Global variables without explicit scope declaration diff --git a/bin/build_and_test_pod b/bin/build_and_test_pod index 195d4e52..1d811b91 100755 --- a/bin/build_and_test_pod +++ b/bin/build_and_test_pod @@ -1,27 +1,82 @@ -#!/bin/bash -eu +#!/bin/bash + +# Script: build_and_test_pod +# +# Description: +# Builds and tests a CocoaPods library using Fastlane. Handles both standard +# and Example project structures, installing dependencies as needed. +# +# Usage: +# build_and_test_pod [FASTLANE_ARGS...] +# +# Arguments: +# FASTLANE_ARGS - Arguments passed directly to fastlane (optional, defaults to 'test') +# +# Returns: +# 0 - Build and tests passed successfully +# 1 - Setup failed (gems, pods, etc.) +# 2 - Build or tests failed # -# Any arguments passed to this script will be passed through to `fastlane`. -# If no argument is passed, the `test` lane will be called by default. +# Notes: +# - Must run on a macOS CI agent with Xcode installed +# - Supports both standard pod structure and Example project structure +# - Will install dependencies in both root and Example/ directory if needed +# - Uses Fastlane for build and test execution +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `bundle` +# - `fastlane` (via bundler) +# - `xcrun` +# - `install_gems` (provided by a8c-ci-toolkit) +# - `install_cocoapods` (provided by a8c-ci-toolkit) + +set -euo pipefail echo "--- :rubygems: Setting up Gems" -install_gems +if ! 
install_gems; then + echo "Error: Failed to install required gems" >&2 + exit 1 +fi -if [ -f "Podfile.lock" ]; then - echo "--- :cocoapods: Setting up Pods" - install_cocoapods +# Install pods in root directory if needed +if [[ -f "Podfile.lock" ]]; then + echo "--- :cocoapods: Setting up Pods" + if ! install_cocoapods; then + echo "Error: Failed to install pods in root directory" >&2 + exit 1 + fi fi -if [ -f "Example/Podfile.lock" ]; then - cd Example - echo "--- :cocoapods: Setting up Pods" - install_cocoapods - cd - +# Install pods in Example directory if needed +if [[ -f "Example/Podfile.lock" ]]; then + echo "--- :cocoapods: Setting up Example Pods" + # Use pushd/popd for safer directory navigation + pushd Example > /dev/null || { + echo "Error: Failed to change to Example directory" >&2 + exit 1 + } + + if ! install_cocoapods; then + echo "Error: Failed to install pods in Example directory" >&2 + popd > /dev/null + exit 1 + fi + + popd > /dev/null fi echo "--- :test_tube: Building and Running Tests" # For some reason this fixes a failure in `lib lint` # https://github.com/Automattic/buildkite-ci/issues/7 -xcrun simctl list >> /dev/null +if ! xcrun simctl list >> /dev/null; then + echo "Error: Failed to list simulators. Is Xcode properly installed?" >&2 + exit 1 +fi -bundle exec fastlane "${@:-test}" +# Run fastlane with provided arguments or 'test' by default +if ! bundle exec fastlane "${@:-test}"; then + echo "Error: Fastlane execution failed" >&2 + exit 2 +fi diff --git a/bin/cache_cocoapods b/bin/cache_cocoapods index ee8cf065..173b55ef 100755 --- a/bin/cache_cocoapods +++ b/bin/cache_cocoapods @@ -1,5 +1,26 @@ -#!/bin/bash -eu +#!/bin/bash -# This file is deprecated, but is being kept around until v2.0 +# Script: cache_cocoapods +# +# Description: +# [DEPRECATED] This script has been replaced by `cache_cocoapods_specs_repos`. +# It now only forwards to the new script for backward compatibility. +# +# Usage: +# cache_cocoapods +# +# Returns: +# - Returns the same exit code as `cache_cocoapods_specs_repos` +# +# Notes: +# - This script is deprecated and will be removed in v2.0 +# - Users should migrate to using `cache_cocoapods_specs_repos` directly +# - This script exists only for backward compatibility +# +# Requirements: +# - `cache_cocoapods_specs_repos` (provided by a8c-ci-toolkit) +set -euo pipefail + +# Forward to the new script cache_cocoapods_specs_repos diff --git a/bin/cache_cocoapods_specs_repos b/bin/cache_cocoapods_specs_repos index 66ec1776..b5330dcc 100755 --- a/bin/cache_cocoapods_specs_repos +++ b/bin/cache_cocoapods_specs_repos @@ -1,12 +1,47 @@ -#!/bin/bash -eu +#!/bin/bash + +# Script: cache_cocoapods_specs_repos +# +# Description: +# Updates and caches CocoaPods specs repositories and the global CocoaPods cache. +# This script handles both the global CocoaPods specs repo and the global CocoaPods cache directory. 
+# +# Usage: +# cache_cocoapods_specs_repos +# +# Notes: +# - This script must run on a macOS CI agent with CocoaPods installed +# - It expects the current directory to be the root of an iOS/macOS project +# - If a Podfile exists, it will run `pod install` and cache the global CocoaPods cache directory +# +# Returns: +# 0 - Cache was successfully updated and saved +# 1 - Required environment variable is missing or other error occurred +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `bundle` +# - `pod` (via bundler) +# - `save_cache` (provided by a8c-ci-toolkit) +# +# Environment Variables: +# BUILDKITE_PIPELINE_SLUG - The slug of the current buildkite pipeline (required) + +set -euo pipefail + +# Validate required environment variable +if [[ -z "${BUILDKITE_PIPELINE_SLUG:-}" ]]; then + echo "Error: BUILDKITE_PIPELINE_SLUG environment variable is not set" + exit 1 +fi # Update CocoaPods's master specs repo (used when you don't use the CDN) bundle exec pod repo update --verbose -save_cache ~/.cocoapods "$BUILDKITE_PIPELINE_SLUG-specs-repos" --force +save_cache "${HOME}/.cocoapods" "${BUILDKITE_PIPELINE_SLUG}-specs-repos" --force -if [ -f Podfile ]; then +if [[ -f Podfile ]]; then # Update the cache of the Pods used by the repo. # Skip if the repo doesn't have a `Podfile` (e.g. lib repo with only a `.podspec`) bundle exec pod install --verbose - save_cache ~/Library/Caches/CocoaPods/ "$BUILDKITE_PIPELINE_SLUG-global-pod-cache" --force + save_cache "${HOME}/Library/Caches/CocoaPods/" "${BUILDKITE_PIPELINE_SLUG}-global-pod-cache" --force fi diff --git a/bin/download_artifact b/bin/download_artifact index db2e03d0..01dc8440 100755 --- a/bin/download_artifact +++ b/bin/download_artifact @@ -1,39 +1,112 @@ -#!/bin/bash -eu +#!/bin/bash -# Usage -# download_artifact $file_name [$destination] +# Script: download_artifact # -# $file_name should be the name of the file - it'll automatically be combined with the current build ID to differentiate between -# the same file in different jobs. It'll be identical to the `basename` of whatever you passed to `store_artifact`. +# Description: +# Downloads a previously uploaded build artifact from S3. The file is retrieved using +# the build ID and filename to match the path used by `upload_artifact`. This script +# is a core utility used by many other scripts to retrieve build outputs, test results, +# and other artifacts that were stored or shared between jobs. # -# $destination is an optional argument – by default, `download_artifact` will download the artifact to the present working directory, -# but if you'd like it stored elsewhere, just pass the path as the second argument. +# Usage: +# download_artifact [destination] +# +# Arguments: +# file_name - Name of the file to download (must match the basename of the uploaded file) +# destination - (Optional) Path where to save the file. Defaults to current directory. 
+# +# Examples: +# download_artifact app.ipa +# download_artifact test-results.xml ./test-output/ +# download_artifact lcov.info ./coverage/ +# download_artifact app-release.apk ./build/outputs/ +# +# Returns: +# 0 - File was successfully downloaded +# 1 - Missing file name argument +# 2 - Invalid destination or missing bucket configuration +# 3 - File not found in S3 +# 4 - AWS operation failed +# +# Environment Variables: +# ARTIFACTS_S3_BUCKET - S3 bucket name for storing artifacts (required) +# BUILDKITE_BUILD_ID - Current build ID (automatically set by Buildkite) +# +# Notes: +# - Compatible with both macOS and Linux +# - Supports S3 transfer acceleration if enabled on the bucket +# - Files are retrieved from path format: $BUILDKITE_BUILD_ID/filename +# - Creates destination directory if it doesn't exist +# - Preserves original file permissions +# - Reports download progress +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `aws` +# - `jq` +# - `mkdir` (for creating destination directories) -ARTIFACT_FILE_NAME=${1-} +set -euo pipefail -if [ -z "$ARTIFACT_FILE_NAME" ]; then - echo "You must pass the name of the file you want to download" +# Validate required arguments +if [[ -z "${1:-}" ]]; then + echo "Error: You must pass the name of the file you want to download" >&2 exit 1 fi -BUCKET=${ARTIFACTS_S3_BUCKET-} +ARTIFACT_FILE_NAME=$1 + +# Validate environment variables +if [[ -z "${BUILDKITE_BUILD_ID:-}" ]]; then + echo "Error: BUILDKITE_BUILD_ID environment variable is not set" >&2 + exit 2 +fi -if [ -z "$BUCKET" ]; then - echo "You must pass set the \`ARTIFACTS_S3_BUCKET\` environment variable with the S3 bucket you'd like to use" +BUCKET=${ARTIFACTS_S3_BUCKET:-} +if [[ -z "$BUCKET" ]]; then + echo "Error: You must set the \`ARTIFACTS_S3_BUCKET\` environment variable with the S3 bucket you'd like to use" >&2 exit 2 fi -OUTPUT_PATH=${2-.} +OUTPUT_PATH=${2:-.} + +# Ensure output directory exists and is writable +if [[ ! -d "$OUTPUT_PATH" ]]; then + echo "Creating destination directory '$OUTPUT_PATH'" + if ! mkdir -p "$OUTPUT_PATH"; then + echo "Error: Failed to create destination directory '$OUTPUT_PATH'" >&2 + exit 2 + fi +fi + +if [[ ! -w "$OUTPUT_PATH" ]]; then + echo "Error: Destination directory '$OUTPUT_PATH' is not writable" >&2 + exit 2 +fi KEY="$BUILDKITE_BUILD_ID/$ARTIFACT_FILE_NAME" +# Check if the file exists in S3 +if ! aws s3api head-object --bucket "$BUCKET" --key "$KEY" > /dev/null 2>&1; then + echo "Error: Artifact '$ARTIFACT_FILE_NAME' not found in build $BUILDKITE_BUILD_ID" >&2 + exit 3 +fi + # If the bucket has transfer acceleration enabled, use it! ACCELERATION_STATUS=$(aws s3api get-bucket-accelerate-configuration --bucket "$BUCKET" | jq '.Status' -r || true) -if [ "$ACCELERATION_STATUS" = "Enabled" ]; then +if [[ "$ACCELERATION_STATUS" = "Enabled" ]]; then echo "Downloading with transfer acceleration" - aws s3 cp "s3://$BUCKET/$KEY" "$OUTPUT_PATH" --endpoint-url https://s3-accelerate.amazonaws.com + if ! aws s3 cp "s3://$BUCKET/$KEY" "$OUTPUT_PATH" --endpoint-url https://s3-accelerate.amazonaws.com; then + echo "Error: Failed to download artifact from S3" >&2 + exit 4 + fi else - aws s3 cp "s3://$BUCKET/$KEY" "$OUTPUT_PATH" + if ! 
aws s3 cp "s3://$BUCKET/$KEY" "$OUTPUT_PATH"; then + echo "Error: Failed to download artifact from S3" >&2 + exit 4 + fi fi +echo "Successfully downloaded $ARTIFACT_FILE_NAME to $OUTPUT_PATH" + diff --git a/bin/hash_directory b/bin/hash_directory index 2d9611f0..1ad2c614 100755 --- a/bin/hash_directory +++ b/bin/hash_directory @@ -1,21 +1,99 @@ -#!/bin/bash -eu +#!/bin/bash -DIRECTORY_PATH=$1 +# Script: hash_directory +# +# Description: +# Generates a deterministic SHA-256 hash of all files in a directory. +# The hash is computed by hashing each file, sorting by path, and then +# hashing the combined result. This ensures consistent hashes regardless +# of file order or system differences. +# +# Usage: +# hash_directory +# +# Arguments: +# directory_path - Path to the directory to hash +# +# Examples: +# hash_directory build/ +# hash_directory ~/.gradle/caches/ +# hash_directory ./Pods/ +# +# Returns: +# 0 - Successfully generated hash, outputs hash to stdout +# 1 - Missing directory argument +# 2 - Directory not found or not accessible +# 3 - Required commands not available +# 4 - Hash computation failed +# +# Notes: +# - The hash is deterministic (same hash for same content regardless of file order) +# - Files are processed in parallel for better performance +# - Compatible with both macOS and Linux +# - Empty directories will produce a consistent hash +# - Symlinks are followed +# - Hidden files (starting with .) are included +# +# Requirements: +# This script needs to run on a CI agent which has the following commands: +# - `find` +# - `sort` +# - One of: +# - `shasum` (on macOS) +# - `sha256sum` (on Linux) + +set -euo pipefail -if [ -z "$1" ]; then - echo "You must pass a directory name to hash" +# Validate input argument +if [[ -z "${1:-}" ]]; then + echo "Error: You must pass a directory path to hash" >&2 exit 1 fi -# `shasum` is available on only macOS +# Validate directory exists and is accessible +if [[ ! -d "$1" ]]; then + echo "Error: Directory '$1' not found" >&2 + exit 2 +fi + +if [[ ! -r "$1" ]]; then + echo "Error: Directory '$1' is not readable" >&2 + exit 2 +fi + +DIRECTORY_PATH=$1 + +# Determine which hash command to use based on platform if command -v shasum &> /dev/null; then - sha_command=(shasum -a 256) + sha_command=(shasum -a 256) +elif command -v sha256sum &> /dev/null; then + sha_command=(sha256sum) else - sha_command=(sha256sum) + echo "Error: Neither 'shasum' nor 'sha256sum' command found" >&2 + exit 3 +fi + +# Validate find command is available +if ! command -v find &> /dev/null; then + echo "Error: 'find' command not found" >&2 + exit 3 fi -# - Find all files in the given directory -# - Run `sha256sum` on each file found – the `+` flag does it in parallel for a huge speed boost. -# - Sort the files by filename for deterministic hashing -# - Take the hash of all of the output hashes (and file paths) -find "${DIRECTORY_PATH%/}" -type f -exec "${sha_command[@]}" "{}" \+ | sort -k 2 | "${sha_command[@]}" | cut -f1 -d " " +# Validate sort command is available +if ! command -v sort &> /dev/null; then + echo "Error: 'sort' command not found" >&2 + exit 3 +fi + +# Process: +# 1. Find all files in the given directory +# 2. Run sha256sum on each file found – the `+` flag does it in parallel for speed +# 3. Sort the files by filename for deterministic hashing +# 4. Take the hash of all of the output hashes (and file paths) +if ! 
(find "${DIRECTORY_PATH%/}" -type f -exec "${sha_command[@]}" "{}" \+ | \ + sort -k 2 | \ + "${sha_command[@]}" | \ + cut -f1 -d " "); then + echo "Error: Failed to compute hash for directory '$1'" >&2 + exit 4 +fi diff --git a/bin/hash_file b/bin/hash_file index 909a1ba2..7009af39 100755 --- a/bin/hash_file +++ b/bin/hash_file @@ -1,17 +1,74 @@ -#!/bin/bash -eu +#!/bin/bash -if [ -z "${1:-}" ]; then - echo "You must pass a filename to hash" +# Script: hash_file +# +# Description: +# Generates a SHA-256 hash of a file using platform-appropriate commands. +# Uses `shasum` on macOS and `sha256sum` on other platforms. +# This script is a core utility used by many other scripts for cache key generation +# and file validation. +# +# Usage: +# hash_file +# +# Arguments: +# filename - Path to the file to hash +# +# Examples: +# hash_file Podfile.lock +# hash_file build/output.log +# hash_file ~/.gradle/caches/modules-2/modules-2.lock +# +# Returns: +# 0 - Successfully generated hash, outputs hash to stdout +# 1 - Missing filename argument +# 2 - File not found or not accessible +# 3 - Required hash command not available +# +# Notes: +# - The hash is printed to stdout without the filename +# - Compatible with both macOS and Linux +# - Uses SHA-256 algorithm for consistent hashing across platforms +# - Output is always lowercase hexadecimal +# +# Requirements: +# This script needs to run on a CI agent which has one of the following commands: +# - `shasum` (on macOS) +# - `sha256sum` (on Linux) +# - `cut` (for output formatting) + +set -euo pipefail + +# Validate input argument +if [[ -z "${1:-}" ]]; then + echo "Error: You must pass a filename to hash" >&2 exit 1 fi -# `shasum` is available on only macOS +# Validate file exists and is readable +if [[ ! -f "$1" ]]; then + echo "Error: File '$1' not found" >&2 + exit 2 +fi + +if [[ ! -r "$1" ]]; then + echo "Error: File '$1' is not readable" >&2 + exit 2 +fi + +# Determine which hash command to use based on platform if command -v shasum &> /dev/null; then - sha_command=(shasum -a 256) + sha_command=(shasum -a 256) +elif command -v sha256sum &> /dev/null; then + sha_command=(sha256sum) else - sha_command=(sha256sum) + echo "Error: Neither 'shasum' nor 'sha256sum' command found" >&2 + exit 3 fi -# Both `shasum` and `sha256sum` will print the hash and the file name (`$1`). +# Both `shasum` and `sha256sum` will print the hash and the file name. # We only care about the hash, so we use `cut` to extract it. -"${sha_command[@]}" "$1" | cut -f1 -d " " +if ! "${sha_command[@]}" "$1" | cut -f1 -d " "; then + echo "Error: Failed to generate hash for '$1'" >&2 + exit 3 +fi diff --git a/bin/install_cocoapods b/bin/install_cocoapods index 526e5b06..96b69143 100755 --- a/bin/install_cocoapods +++ b/bin/install_cocoapods @@ -1,13 +1,68 @@ -#!/bin/bash -eu +#!/bin/bash + +# Script: install_cocoapods +# +# Description: +# Installs CocoaPods dependencies for an iOS/macOS project, with caching support. +# This script handles both global CocoaPods caches and project-specific Pods, +# and ensures the Podfile.lock remains unchanged during installation. 
+# +# Usage: +# install_cocoapods +# +# Returns: +# 0 - Dependencies were successfully installed +# 1 - Missing required files or environment variables +# 2 - Pod installation failed +# 3 - Podfile.lock was modified during installation +# +# Environment Variables: +# BUILDKITE_PIPELINE_SLUG - The slug of the current buildkite pipeline (required) +# +# Notes: +# - This script must run on a macOS CI agent +# - Requires a Podfile and Podfile.lock in the current directory +# - Uses caching to speed up installations +# - Will attempt to use `pod check` if available +# - Will retry with `--repo-update` if initial install fails +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `bundle` +# - `pod` (via bundler) +# - `git` +# - `buildkite-agent` +# - `hash_file` (provided by a8c-ci-toolkit) +# - `restore_cache` (provided by a8c-ci-toolkit) +# - `save_cache` (provided by a8c-ci-toolkit) + +set -euo pipefail + +# Validate required environment variable +if [[ -z "${BUILDKITE_PIPELINE_SLUG:-}" ]]; then + echo "Error: BUILDKITE_PIPELINE_SLUG environment variable is not set" >&2 + exit 1 +fi + +# Validate required files +if [[ ! -f "Podfile" ]]; then + echo "Error: No Podfile found in current directory" >&2 + exit 1 +fi + +if [[ ! -f "Podfile.lock" ]]; then + echo "Error: No Podfile.lock found in current directory" >&2 + exit 1 +fi # Start by restoring specs repo and pod cache: # - The specs repo cache holds all of the Podspec files. This avoids having download them all from the CDN. # - The pod cache holds the downloaded Pod source files. This avoids having to check them out again. -restore_cache "$BUILDKITE_PIPELINE_SLUG-specs-repos" -restore_cache "$BUILDKITE_PIPELINE_SLUG-global-pod-cache" +restore_cache "${BUILDKITE_PIPELINE_SLUG}-specs-repos" +restore_cache "${BUILDKITE_PIPELINE_SLUG}-global-pod-cache" PODFILE_HASH=$(hash_file Podfile.lock) -LOCAL_CACHE_KEY="$BUILDKITE_PIPELINE_SLUG-local-pod-cache-$PODFILE_HASH" +LOCAL_CACHE_KEY="${BUILDKITE_PIPELINE_SLUG}-local-pod-cache-${PODFILE_HASH}" # Restore the local `Pods` directory based on the `Podfile.lock` contents restore_cache "$LOCAL_CACHE_KEY" @@ -16,43 +71,45 @@ restore_cache "$LOCAL_CACHE_KEY" # If it's not installed (or if it fails), we'll try to install Pods. # If that fails, it may be due to an out-of-date repo. We can use `--repo-update` to try to resolve this automatically. if bundle exec pod plugins installed | grep -q check; then - bundle exec pod check || bundle exec pod install || bundle exec pod install --repo-update --verbose + if ! (bundle exec pod check || bundle exec pod install || bundle exec pod install --repo-update --verbose); then + echo "Error: Failed to install pods" >&2 + exit 2 + fi else - bundle exec pod install || bundle exec pod install --repo-update --verbose + if ! (bundle exec pod install || bundle exec pod install --repo-update --verbose); then + echo "Error: Failed to install pods" >&2 + exit 2 + fi fi # Check that `Podfile.lock` was unchanged by `pod install`. If it was, it means # the lockfile might have been inadvertently changed. -# `shasum` is available only on macOS -if command -v shasum &> /dev/null; then - sha_command='shasum -a 256' -else - sha_command='sha256sum' -fi - function lockfile_error () { - message=$(cat <See diff +
<details><summary>See diff</summary> - \`\`\`diff - $(git diff -- Podfile.lock) - \`\`\` -</details>
+\`\`\`diff +$(git diff -- Podfile.lock) +\`\`\` + EOF - ) - - echo "$message" + ) - buildkite-agent annotate "$message" --style 'error' --context 'ctx-error' + echo "$message" >&2 + buildkite-agent annotate "$message" --style 'error' --context 'ctx-error' + exit 3 } trap lockfile_error ERR echo "Checking that Podfile.lock was not modified by 'pod install'" -# Notice the two spaces as per shasum/sha256sum output -echo "${PODFILE_HASH} Podfile.lock" | $sha_command --check --status +# Use our hash_file helper instead of platform-specific commands +NEW_HASH=$(hash_file Podfile.lock) +if [[ "$PODFILE_HASH" != "$NEW_HASH" ]]; then + lockfile_error +fi # Remove trap for the lockfile error now that we've done the check. trap - ERR diff --git a/bin/install_npm_packages b/bin/install_npm_packages index fbb5cf98..e27ac09e 100755 --- a/bin/install_npm_packages +++ b/bin/install_npm_packages @@ -1,25 +1,102 @@ -#!/bin/bash -eu +#!/bin/bash -# Ensure package.json is present -if [ ! -f package.json ]; then - echo "No valid package.json file found" +# Script: install_npm_packages +# +# Description: +# Installs Node.js dependencies using npm with caching support. This script +# optimizes Node.js builds by caching node_modules based on the package-lock.json +# hash, platform, architecture, and Node.js version. +# +# Usage: +# install_npm_packages +# +# Returns: +# 0 - Dependencies were successfully installed +# 1 - Missing required files or environment variables +# 2 - Platform information retrieval failed +# 3 - Node.js command failed +# 4 - Cache operations failed +# 5 - npm install failed +# +# Environment Variables: +# BUILDKITE_PIPELINE_SLUG - The slug of the current buildkite pipeline (required) +# NODE_VERSION - Current Node.js version (automatically detected) +# +# Notes: +# - Compatible with both macOS and Linux +# - Requires package.json and package-lock.json in current directory +# - Cache key includes platform, architecture, and Node.js version +# - Uses package-lock.json hash for cache invalidation +# - Supports npm's package-lock.json format +# - Automatically handles npm install flags +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `node` +# - `npm` +# - `uname` +# - `hash_file` (provided by a8c-ci-toolkit) +# - `restore_cache` (provided by a8c-ci-toolkit) +# - `save_cache` (provided by a8c-ci-toolkit) + +set -euo pipefail + +# Validate required environment variable +if [[ -z "${BUILDKITE_PIPELINE_SLUG:-}" ]]; then + echo "Error: BUILDKITE_PIPELINE_SLUG environment variable is not set" >&2 exit 1 fi -# Ensure package-lock.json is present -if [ ! -f package-lock.json ]; then - echo "No valid package-lock.json file found" +# Validate required files +if [[ ! -f "package.json" ]]; then + echo "Error: No package.json file found in current directory" >&2 exit 1 fi -PLATFORM=$(uname -s) -ARCHITECTURE=$(uname -m) -NODE_VERSION=$(node --version) -PACKAGE_HASH=$(hash_file package-lock.json) -CACHEKEY="$BUILDKITE_PIPELINE_SLUG-$PLATFORM-$ARCHITECTURE-node$NODE_VERSION-$PACKAGE_HASH" +if [[ ! -f "package-lock.json" ]]; then + echo "Error: No package-lock.json file found in current directory" >&2 + exit 1 +fi -restore_cache "$CACHEKEY" +# Get platform information +if ! PLATFORM=$(uname -s); then + echo "Error: Failed to determine platform" >&2 + exit 2 +fi + +if ! ARCHITECTURE=$(uname -m); then + echo "Error: Failed to determine architecture" >&2 + exit 2 +fi + +# Get Node.js version +if ! 
NODE_VERSION=$(node --version); then + echo "Error: Failed to determine Node.js version" >&2 + exit 3 +fi -npm install +# Generate cache key based on platform, architecture, Node.js version, and package-lock.json hash +if ! PACKAGE_HASH=$(hash_file package-lock.json); then + echo "Error: Failed to compute hash for package-lock.json" >&2 + exit 4 +fi + +CACHE_KEY="$BUILDKITE_PIPELINE_SLUG-$PLATFORM-$ARCHITECTURE-node$NODE_VERSION-$PACKAGE_HASH" + +echo "Restoring npm packages from cache..." +if ! restore_cache "$CACHE_KEY"; then + echo "Warning: Failed to restore cache, will perform fresh install" >&2 +fi + +echo "Installing npm packages..." +if ! npm install; then + echo "Error: npm install failed" >&2 + exit 5 +fi + +echo "Saving npm packages to cache..." +if ! save_cache node_modules/ "$CACHE_KEY"; then + echo "Warning: Failed to save cache" >&2 +fi -save_cache node_modules/ "$CACHEKEY" +echo "Successfully installed npm packages" diff --git a/bin/install_swiftpm_dependencies b/bin/install_swiftpm_dependencies index 1159215a..a98285b7 100755 --- a/bin/install_swiftpm_dependencies +++ b/bin/install_swiftpm_dependencies @@ -1,4 +1,43 @@ -#!/bin/bash -eu +#!/bin/bash + +# Script: install_swiftpm_dependencies +# +# Description: +# Installs Swift Package Manager (SPM) dependencies for an Xcode project or Swift package. +# Supports both Xcode-managed dependencies (.xcworkspace/.xcodeproj) and pure SPM projects. +# Includes caching support to speed up dependency resolution. +# +# Usage: +# install_swiftpm_dependencies [--workspace PATH | --project PATH | --use-spm] +# +# Arguments: +# --workspace PATH Path to .xcworkspace file managing SPM dependencies +# --project PATH Path to .xcodeproj file managing SPM dependencies +# --use-spm Use pure SPM mode (Package.swift at root) +# -h, --help Show this help message +# +# Returns: +# 0 - Dependencies installed successfully +# 1 - Invalid arguments or missing Package.resolved +# 2 - Unexpected arguments provided +# 3 - Dependency resolution failed +# +# Notes: +# - Must run on a macOS CI agent with Xcode installed +# - Requires appropriate SSH access for private dependencies +# - Caches dependencies to speed up future builds +# - Will attempt to auto-detect project type if no flags provided +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `xcodebuild` +# - `swift` +# - `hash_file` (provided by a8c-ci-toolkit) +# - `restore_cache` (provided by a8c-ci-toolkit) +# - `save_cache` (provided by a8c-ci-toolkit) +# - `add_host_to_ssh_known_hosts` (provided by a8c-ci-toolkit) + +set -euo pipefail print_usage() { echo "Usage:" @@ -35,7 +74,7 @@ case ${1:-} in esac if [[ "$#" -gt 0 ]]; then - echo "Unexpected extra arguments: $*" >&2 + echo "Error: Unexpected extra arguments: $*" >&2 print_usage exit 2 fi @@ -54,17 +93,17 @@ if [[ -z "${XCWORKSPACE_PATH}" && -z "${XCODEPROJ_PATH}" && "${USE_SPM}" != "tru XCWORKSPACE_PATH="${FOUND_ROOT_WORKSPACE_PATHS[0]}" echo " --> Found a single \`.xcworkspace\` file, and no \`Package.swift\` in the root of the repo." echo " Defaulting to \`--workspace \"${XCWORKSPACE_PATH}\"\`." - elif [[ ${#FOUND_ROOT_WORKSPACE_PATHS[@]} -eq 0 && ${#FOUND_ROOT_PROJECT_PATHS[@] } -eq 1 && ! -f "Package.swift" ]]; then + elif [[ ${#FOUND_ROOT_WORKSPACE_PATHS[@]} -eq 0 && ${#FOUND_ROOT_PROJECT_PATHS[@]} -eq 1 && ! 
-f "Package.swift" ]]; then XCODEPROJ_PATH="${FOUND_ROOT_PROJECT_PATHS[0]}" echo " --> Found an \`.xcodeproj\`, and no \`Package.swift\` nor \`.xcworkspace\`in the root of the repo." echo " Defaulting to \`--project \"${XCODEPROJ_PATH}\`" - elif [[ ${#FOUND_ROOT_WORKSPACE_PATHS[@]} -eq 0 && ${#FOUND_ROOT_PROJECT_PATHS[@] } -eq 0 && -f "Package.swift" ]]; then + elif [[ ${#FOUND_ROOT_WORKSPACE_PATHS[@]} -eq 0 && ${#FOUND_ROOT_PROJECT_PATHS[@]} -eq 0 && -f "Package.swift" ]]; then echo " --> Found a \`Package.swift\`, and no \`.xcworkspace\` or \`.xcodeproj\` in the root of the repo." echo " Defaulting to \`--use-spm\`" USE_SPM=true else - echo " -!- No valid --workspace, --project or --use-spm flag provided, and cannot guess which one to use either, so aborting." - echo " Please call $0 with an explicit \`--workspace PATH\`, \`--project PATH\` or \`--use-spm\` flag to disambiguate." + echo "Error: No valid --workspace, --project or --use-spm flag provided, and cannot guess which one to use." >&2 + echo " Please call $0 with an explicit \`--workspace PATH\`, \`--project PATH\` or \`--use-spm\` flag to disambiguate." >&2 exit 1 fi fi @@ -80,26 +119,35 @@ elif [[ "${USE_SPM}" == "true" ]]; then fi if [[ ! -f "${PACKAGE_RESOLVED_LOCATION}" ]]; then - echo "Unable to find \`Package.resolved\` file (${PACKAGE_RESOLVED_LOCATION:-unable to guess path})" + echo "Error: Unable to find \`Package.resolved\` file (${PACKAGE_RESOLVED_LOCATION:-unable to guess path})" >&2 exit 1 fi # Restore SPM cache if it's available echo "~~~ Restoring SPM cache if available" -PACKAGE_RESOLVED_HASH=$(hash_file "${PACKAGE_RESOLVED_LOCATION}") +if ! PACKAGE_RESOLVED_HASH=$(hash_file "${PACKAGE_RESOLVED_LOCATION}"); then + echo "Error: Failed to compute hash for Package.resolved" >&2 + exit 1 +fi + CACHE_KEY="${BUILDKITE_PIPELINE_SLUG}-spm-cache-${PACKAGE_RESOLVED_HASH}" mkdir -p "${SPM_CACHE_LOCATION}" -cd "${SPM_CACHE_LOCATION}" -restore_cache "${CACHE_KEY}" -cd - +cd "${SPM_CACHE_LOCATION}" || exit 1 +if ! restore_cache "${CACHE_KEY}"; then + echo "Warning: Failed to restore cache, will download dependencies from scratch" >&2 +fi +cd - || exit 1 # This will let Xcode use the system SSH config for downloading packages -sudo defaults write com.apple.dt.Xcode IDEPackageSupportUseBuiltinSCM YES +if ! sudo defaults write com.apple.dt.Xcode IDEPackageSupportUseBuiltinSCM YES; then + echo "Warning: Failed to configure Xcode SSH settings" >&2 +fi # Trust all GitHub.com and BitBucket.org keys – this allows checking out dependencies via SSH -add_host_to_ssh_known_hosts bitbucket.org -add_host_to_ssh_known_hosts github.com +if ! add_host_to_ssh_known_hosts bitbucket.org || ! add_host_to_ssh_known_hosts github.com; then + echo "Warning: Failed to add some hosts to SSH known_hosts" >&2 +fi # Resolve the packages using the correct method if [[ -d "${XCWORKSPACE_PATH}" ]]; then @@ -109,14 +157,23 @@ if [[ -d "${XCWORKSPACE_PATH}" ]]; then # (despite the help page of `xcodebuild` suggesting that it should work without `-scheme`). Since the dependency resolution doesn't really depend on the scheme # and we don't want to have to provide or guess it, using `-list` instead stops making `xcodebuild` complain about `-workspace` not being used in conjunction # with `-scheme` (even if in practice we don't care about the scheme list it returns) - xcodebuild -workspace "${XCWORKSPACE_PATH}" -resolvePackageDependencies -onlyUsePackageVersionsFromResolvedFile -list + if ! 
xcodebuild -workspace "${XCWORKSPACE_PATH}" -resolvePackageDependencies -onlyUsePackageVersionsFromResolvedFile -list; then + echo "Error: Failed to resolve dependencies for workspace" >&2 + exit 3 + fi elif [[ -d "${XCODEPROJ_PATH}" ]]; then echo "~~~ Resolving Swift Packages with \`xcodebuild\`" echo "Using -project \"${XCODEPROJ_PATH}\"" - xcodebuild -project "${XCODEPROJ_PATH}" -resolvePackageDependencies -onlyUsePackageVersionsFromResolvedFile + if ! xcodebuild -project "${XCODEPROJ_PATH}" -resolvePackageDependencies -onlyUsePackageVersionsFromResolvedFile; then + echo "Error: Failed to resolve dependencies for project" >&2 + exit 3 + fi elif [[ "${USE_SPM}" == "true" ]]; then echo "~~~ Resolving packages with \`swift package\`" - swift package resolve + if ! swift package resolve; then + echo "Error: Failed to resolve dependencies with swift package" >&2 + exit 3 + fi fi # `checkouts` can be removed because the system can quickly generate them @@ -131,4 +188,6 @@ echo "Done. Removed checkouts and artifacts subfolders from $SPM_CACHE_LOCATION" # If this is the first time we've seen this particular cache key, save it for the future echo "~~~ Saving SPM Cache" -save_cache "${SPM_CACHE_LOCATION}" "${CACHE_KEY}" false --use_relative_path_in_tar +if ! save_cache "${SPM_CACHE_LOCATION}" "${CACHE_KEY}" false --use_relative_path_in_tar; then + echo "Warning: Failed to save cache" >&2 +fi diff --git a/bin/lint_pod b/bin/lint_pod index 36aefc2e..db020b94 100755 --- a/bin/lint_pod +++ b/bin/lint_pod @@ -1,17 +1,61 @@ -#!/bin/bash -eu +#!/bin/bash + +# Script: lint_pod +# +# Description: +# Runs various linting checks on a CocoaPods-based iOS/macOS project. +# Validates Ruby code style, Gemfile.lock, and Podfile.lock if present. +# +# Usage: +# lint_pod +# +# Returns: +# 0 - All checks passed successfully +# 1 - One or more checks failed +# +# Notes: +# - Must run on a macOS CI agent +# - Checks include: +# - Ruby code style via Rubocop +# - Gemfile.lock validation +# - Podfile.lock validation (if present) +# - TODO: Add SwiftLint integration +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `bundle` +# - `rubocop` (via bundler) +# - `install_gems` (provided by a8c-ci-toolkit) +# - `validate_gemfile_lock` (provided by a8c-ci-toolkit) +# - `validate_podfile_lock` (provided by a8c-ci-toolkit) + +set -euo pipefail echo "--- :rubygems: Setting up Gems" -install_gems +if ! install_gems; then + echo "Error: Failed to install required gems" >&2 + exit 1 +fi echo "--- :rubygems: Checking Gemfile.lock" -validate_gemfile_lock +if ! validate_gemfile_lock; then + echo "Error: Gemfile.lock validation failed" >&2 + exit 1 +fi echo "--- :rubocop: Running Rubocop" -bundle exec rubocop +if ! bundle exec rubocop; then + echo "Error: Rubocop checks failed" >&2 + exit 1 +fi -if [ -f "Podfile.lock" ]; then - echo "--- :cocoapods: Checking Podfile.lock" - validate_podfile_lock +if [[ -f "Podfile.lock" ]]; then + echo "--- :cocoapods: Checking Podfile.lock" + if ! 
validate_podfile_lock; then + echo "Error: Podfile.lock validation failed" >&2 + exit 1 + fi fi -# TODO: Add swiftlint \ No newline at end of file +# TODO: Add SwiftLint integration +# This would involve running `run_swiftlint` if a .swiftlint.yml file is present \ No newline at end of file diff --git a/bin/nvm_install b/bin/nvm_install index 8475db12..abb29b18 100755 --- a/bin/nvm_install +++ b/bin/nvm_install @@ -1,5 +1,24 @@ #!/bin/bash +# Script: nvm_install +# +# Description: +# [DEPRECATED] This script has been replaced by the `automattic/nvm-buildkite-plugin`. +# It now only provides a helpful error message directing users to migrate. +# +# Usage: +# nvm_install +# +# Returns: +# 1 - Always exits with error to indicate deprecation +# +# Notes: +# - This script is deprecated and will be removed in a future version +# - Users should migrate to using the `automattic/nvm-buildkite-plugin` +# - See migration guide at: https://github.com/Automattic/a8c-ci-toolkit-buildkite-plugin/blob/trunk/MIGRATION.md#from-200-to-300 + +set -euo pipefail + echo 'The nvm_install utility has been removed and replaced by our nvm Buildkite plugin.' echo 'Please see this migration guide for details: https://github.com/Automattic/a8c-ci-toolkit-buildkite-plugin/blob/trunk/MIGRATION.md#from-200-to-300' diff --git a/bin/patch-cocoapods b/bin/patch-cocoapods index 4eed7846..ac8f7bf2 100755 --- a/bin/patch-cocoapods +++ b/bin/patch-cocoapods @@ -1,13 +1,33 @@ #!/bin/bash -# This is a hack to workaround https://github.com/CocoaPods/CocoaPods/issues/12033. -# We can remove this script once the issue is fixed. +# Script: patch-cocoapods # -# This script updates the cocoapods source code to change Xcode project targets' -# minimal deployment target to 13.0, which is a requirement in newer Xcode versions. +# Description: +# Patches the CocoaPods gem to work around deployment target issues with newer Xcode versions. +# This is a temporary fix for https://github.com/CocoaPods/CocoaPods/issues/12033. +# The script updates the minimum iOS deployment target to 13.0 for framework targets. +# +# Usage: +# patch-cocoapods +# +# Returns: +# 0 - Patch was successfully applied +# 1 - Failed to locate CocoaPods gem or patch application failed +# +# Notes: +# - This script is temporary and will be removed once the upstream issue is fixed +# - Must run on a macOS CI agent with CocoaPods installed via bundler +# - Modifies the CocoaPods gem files directly +# - Will attempt to reverse and reapply patch if initial application fails +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `bundle` +# - `patch` set -euo pipefail +# The patch content to apply to CocoaPods patch_content=$(cat <<'EOF' --- lib/cocoapods/installer/analyzer.rb +++ lib/cocoapods/installer/analyzer.rb @@ -34,14 +54,30 @@ patch_content=$(cat <<'EOF' EOF ) -cocoapods_gem_path=$(bundle info cocoapods --path) -echo "Path to the cocoapods gem: $cocoapods_gem_path" +# Get the path to the installed CocoaPods gem +if ! cocoapods_gem_path=$(bundle info cocoapods --path); then + echo "Error: Failed to locate CocoaPods gem path" >&2 + exit 1 +fi +echo "Path to the CocoaPods gem: $cocoapods_gem_path" +# Try to apply the patch if echo "$patch_content" | patch --forward --force --directory "$cocoapods_gem_path" --strip 0; then - echo "cocoapods patched successfully" + echo "CocoaPods patched successfully" exit 0 fi -echo "Failed to patch cocoapods. 
Try to re-apply the patch" -echo "$patch_content" | patch --reverse --force --directory "$cocoapods_gem_path" --strip 0 -echo "$patch_content" | patch --forward --force --directory "$cocoapods_gem_path" --strip 0 +# If initial patch failed, try to reverse and reapply +echo "Initial patch application failed, attempting to reverse and reapply..." + +if ! echo "$patch_content" | patch --reverse --force --directory "$cocoapods_gem_path" --strip 0; then + echo "Error: Failed to reverse existing patch" >&2 + exit 1 +fi + +if ! echo "$patch_content" | patch --forward --force --directory "$cocoapods_gem_path" --strip 0; then + echo "Error: Failed to reapply patch" >&2 + exit 1 +fi + +echo "CocoaPods patched successfully after reapply" diff --git a/bin/publish_pod b/bin/publish_pod index 29e90bf5..5614359f 100755 --- a/bin/publish_pod +++ b/bin/publish_pod @@ -1,21 +1,52 @@ -#!/bin/bash -eu +#!/bin/bash -# Usage: publish_pod [OPTIONS] PODSPEC_PATH +# Script: publish_pod # -# OPTIONS: -# `--patch-cocoapods`: -# Apply a patch to work around issues with older deployment targets — see https://github.com/CocoaPods/CocoaPods/issues/12033 -# `--allow-warnings`, `--synchronous`: -# Those options are passed to `pod trunk push` verbatim. +# Description: +# Publishes a CocoaPods podspec to the public CocoaPods trunk repository. +# Includes validation of version numbers against git tags and handles +# co-dependent pods in the same repository. # -# Note: Use `--synchronous` if you have co-dependant podspecs in your repo and need to publish multiple pods at the same time. -# Without this option, since the first pod you push will take time to propagate thru the CocoaPods CDNs, attempting to push -# the other dependant pod(s) in your repo might fail to find the first pushed pod until it has propagated thru CDNs. +# Usage: +# publish_pod [OPTIONS] PODSPEC_PATH # +# Arguments: +# PODSPEC_PATH - Path to the podspec file to publish +# +# Options: +# --patch-cocoapods - Apply patch for older deployment targets (see Notes) +# --allow-warnings - Allow warnings during pod push +# --synchronous - Wait for CDN propagation (recommended for co-dependent pods) +# +# Returns: +# 0 - Pod was successfully published +# 1 - Missing arguments or validation failed +# 2 - Pod push failed +# +# Environment Variables: +# BUILDKITE_TAG - Git tag being built (optional, used for version validation) +# +# Notes: +# - Must run on a macOS CI agent with Xcode installed +# - Requires CocoaPods trunk authentication to be configured +# - The --synchronous option is recommended when publishing multiple co-dependent +# pods to ensure proper CDN propagation between pushes +# - The --patch-cocoapods option works around: https://github.com/CocoaPods/CocoaPods/issues/12033 +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `bundle` +# - `pod` (via bundler) +# - `jq` +# - `xcrun` +# - `patch-cocoapods` (provided by a8c-ci-toolkit) + +set -euo pipefail PATCH_COCOAPODS="false" COCOAPODS_FLAGS=(--verbose) +# Parse command line options while [[ "$#" -gt 0 ]]; do case $1 in --patch-cocoapods) @@ -29,23 +60,46 @@ while [[ "$#" -gt 0 ]]; do shift done +# Validate required arguments +if [[ "$#" -lt 1 ]]; then + echo "Error: Missing podspec path argument" >&2 + exit 1 +fi + PODSPEC_PATH=$1 -# POD_NAME=$(bundle exec pod ipc spec "$PODSPEC_PATH" | jq -r '.name') -POD_VERSION=$(bundle exec pod ipc spec "$PODSPEC_PATH" | jq -r '.version') +if [[ ! 
-f "$PODSPEC_PATH" ]]; then + echo "Error: Podspec file not found at $PODSPEC_PATH" >&2 + exit 1 +fi + +# Extract pod version and validate against git tag if present +if ! POD_VERSION=$(bundle exec pod ipc spec "$PODSPEC_PATH" | jq -r '.version'); then + echo "Error: Failed to extract pod version from podspec" >&2 + exit 1 +fi -if [ -n "$BUILDKITE_TAG" ] && [ "$BUILDKITE_TAG" != "$POD_VERSION" ]; then - echo "Tag $BUILDKITE_TAG does not match version $POD_VERSION from $PODSPEC_PATH." +if [[ -n "${BUILDKITE_TAG:-}" ]] && [[ "$BUILDKITE_TAG" != "$POD_VERSION" ]]; then + echo "Error: Tag $BUILDKITE_TAG does not match version $POD_VERSION from $PODSPEC_PATH" >&2 exit 1 fi -if [[ "${PATCH_COCOAPODS}" == 'true' ]]; then +if [[ "${PATCH_COCOAPODS}" == 'true' ]]; then echo "⚠️ Remove this step once this issue is fixed: https://github.com/CocoaPods/CocoaPods/issues/12033" - patch-cocoapods + if ! patch-cocoapods; then + echo "Error: Failed to apply CocoaPods patch" >&2 + exit 1 + fi fi # For some reason this fixes a failure in `lib lint` # https://github.com/Automattic/buildkite-ci/issues/7 -xcrun simctl list >> /dev/null +if ! xcrun simctl list >> /dev/null; then + echo "Error: Failed to list simulators. Is Xcode properly installed?" >&2 + exit 1 +fi -bundle exec pod trunk push "${COCOAPODS_FLAGS[@]}" "$PODSPEC_PATH" +if ! bundle exec pod trunk push "${COCOAPODS_FLAGS[@]}" "$PODSPEC_PATH"; then + echo "Error: Failed to publish pod to trunk" >&2 + exit 2 +fi diff --git a/bin/restore_cache b/bin/restore_cache index c09314d9..8feefd3d 100755 --- a/bin/restore_cache +++ b/bin/restore_cache @@ -1,17 +1,71 @@ -#!/bin/bash -eu +#!/bin/bash -CACHE_KEY=$1 +# Script: restore_cache +# +# Description: +# Restores a file or directory from an S3 bucket cache. This is the companion +# script to `save_cache` and handles decompressing and restoring cached files. +# This script is a core utility used by many other scripts to restore cached +# build artifacts, dependencies, and other large files to speed up CI/CD pipelines. 
+# +# Usage: +# restore_cache +# +# Arguments: +# cache_key - The key used when the cache was saved +# +# Examples: +# restore_cache "my-project-pods-cache" +# restore_cache "gradle-cache" +# restore_cache "build-cache-$(hash_file build/output)" +# restore_cache "${BUILDKITE_PIPELINE_SLUG}-node-modules" +# +# Returns: +# 0 - Cache was successfully restored or cache miss (not found) +# 1 - Missing required arguments +# 2 - Missing bucket configuration +# 3 - Download or extraction failed +# +# Environment Variables: +# CACHE_BUCKET_NAME - S3 bucket name for caching (optional if plugin bucket is configured) +# BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET - Plugin-provided S3 bucket name (optional) +# +# Notes: +# - Compatible with both macOS and Linux +# - Supports S3 transfer acceleration if enabled on the bucket +# - Will not fail if cache entry doesn't exist (returns 0) +# - Automatically handles decompression of tar/gzip archives +# - Preserves file permissions during extraction +# - Reports cache size and restoration duration +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `aws` +# - `jq` +# - `tar` +# - `awk` (for size calculations) + +set -euo pipefail bytes_to_mb() { local bytes=$1 printf "%.2f" "$(awk "BEGIN {print $bytes / (1024 * 1024)}")" } -S3_BUCKET_NAME=${CACHE_BUCKET_NAME-} -if [ -z "$S3_BUCKET_NAME" ]; then - if [ -z "$BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET" ]; then - echo "⛔Unable to restore file from cache – no \$CACHE_BUCKET_NAME is set" - exit 1 +# Validate required arguments +if [[ -z "${1:-}" ]]; then + echo "Error: You must provide a cache key to restore" >&2 + exit 1 +fi + +CACHE_KEY=$1 + +# Validate bucket configuration +S3_BUCKET_NAME=${CACHE_BUCKET_NAME:-} +if [[ -z "$S3_BUCKET_NAME" ]]; then + if [[ -z "${BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET:-}" ]]; then + echo "Error: Unable to restore file from cache – no \$CACHE_BUCKET_NAME is set" >&2 + exit 2 else echo "Reading bucket name from 'BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET'" S3_BUCKET_NAME="$BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET" @@ -20,6 +74,7 @@ fi echo "Using $S3_BUCKET_NAME as cache bucket" +# Check if cache entry exists and restore if found if aws s3api head-object --bucket "$S3_BUCKET_NAME" --key "$CACHE_KEY" > /dev/null 2>&1; then SECONDS=0 echo "Restoring cache entry $CACHE_KEY" @@ -28,19 +83,31 @@ if aws s3api head-object --bucket "$S3_BUCKET_NAME" --key "$CACHE_KEY" > /dev/nu # If the bucket has transfer acceleration enabled, use it! ACCELERATION_STATUS=$(aws s3api get-bucket-accelerate-configuration --bucket "$S3_BUCKET_NAME" | jq '.Status' -r || true) - if [ "$ACCELERATION_STATUS" = "Enabled" ]; then + if [[ "$ACCELERATION_STATUS" = "Enabled" ]]; then echo "Downloading with transfer acceleration" - aws s3 cp "s3://$S3_BUCKET_NAME/$CACHE_KEY" "$CACHE_KEY" --quiet --endpoint-url https://s3-accelerate.amazonaws.com + if ! aws s3 cp "s3://$S3_BUCKET_NAME/$CACHE_KEY" "$CACHE_KEY" --quiet --endpoint-url https://s3-accelerate.amazonaws.com; then + echo "Error: Failed to download cache from S3" >&2 + rm -f "$CACHE_KEY" + exit 3 + fi else - aws s3 cp "s3://$S3_BUCKET_NAME/$CACHE_KEY" "$CACHE_KEY" --quiet + if ! aws s3 cp "s3://$S3_BUCKET_NAME/$CACHE_KEY" "$CACHE_KEY" --quiet; then + echo "Error: Failed to download cache from S3" >&2 + rm -f "$CACHE_KEY" + exit 3 + fi fi CACHE_SIZE=$(wc -c < "$CACHE_KEY") echo " Decompressing" - tar -xf "$CACHE_KEY" + if ! 
tar -xf "$CACHE_KEY"; then + echo "Error: Failed to extract cache archive" >&2 + rm -f "$CACHE_KEY" + exit 3 + fi echo " Cleaning Up" - rm "$CACHE_KEY" + rm -f "$CACHE_KEY" duration=$SECONDS echo "Cache entry successfully restored" diff --git a/bin/restore_gradle_dependency_cache b/bin/restore_gradle_dependency_cache index c21a66d9..23399ba3 100755 --- a/bin/restore_gradle_dependency_cache +++ b/bin/restore_gradle_dependency_cache @@ -1,4 +1,51 @@ -#!/bin/bash -eu +#!/bin/bash + +# Script: restore_gradle_dependency_cache +# +# Description: +# Restores the Gradle dependency cache from S3 to speed up builds. +# This script optimizes Android/Java builds by restoring cached dependencies, +# significantly reducing build times in CI/CD pipelines. +# +# Usage: +# restore_gradle_dependency_cache +# +# Returns: +# 0 - Cache was successfully restored or cache miss +# 1 - Missing required environment variables +# 2 - Failed to create or access cache directories +# 3 - Failed to restore cache from S3 +# +# Environment Variables: +# BUILDKITE_PIPELINE_SLUG - The slug of the current buildkite pipeline (required) +# GRADLE_HOME - The Gradle home directory (required) +# +# Notes: +# - Must run on a Linux CI agent with Gradle installed +# - Compatible with save_gradle_dependency_cache script +# - Uses version V2 of the cache key format +# - Will not fail if cache doesn't exist (returns 0) +# - Creates necessary directories if they don't exist +# - See: https://docs.gradle.org/current/userguide/dependency_resolution.html#sub:cache_copy +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `gradle` +# - `mkdir` +# - `restore_cache` (provided by a8c-ci-toolkit) + +set -euo pipefail + +# Validate required environment variables +if [[ -z "${BUILDKITE_PIPELINE_SLUG:-}" ]]; then + echo "Error: BUILDKITE_PIPELINE_SLUG environment variable is not set" >&2 + exit 1 +fi + +if [[ -z "${GRADLE_HOME:-}" ]]; then + echo "Error: GRADLE_HOME environment variable is not set" >&2 + exit 1 +fi # The key is shared with `bin/save_gradle_dependency_cache` GRADLE_DEPENDENCY_CACHE_KEY="${BUILDKITE_PIPELINE_SLUG}_GRADLE_DEPENDENCY_CACHE_V2" @@ -8,17 +55,21 @@ echo "Restoring Gradle dependency cache..." # The directory is shared with `bin/save_gradle_dependency_cache` GRADLE_DEP_CACHE="$GRADLE_HOME/dependency-cache" -DEP_CACHE_BASE_FOLDER=$(dirname "$GRADLE_DEP_CACHE") -DEP_CACHE_FOLDER_NAME=$(basename "$GRADLE_DEP_CACHE") +if ! mkdir -p "$GRADLE_DEP_CACHE"; then + echo "Error: Failed to create Gradle dependency cache directory" >&2 + exit 2 +fi -# `save_cache` & `restore_cache` scripts only work if they are called from the same directory -pushd "$DEP_CACHE_BASE_FOLDER" -restore_cache "$GRADLE_DEPENDENCY_CACHE_KEY" +# `restore_cache` & `save_cache` scripts only work if they are called from the same directory +if ! cd "$(dirname "$GRADLE_DEP_CACHE")"; then + echo "Error: Failed to change to cache base directory" >&2 + exit 2 +fi -if [ -d "$DEP_CACHE_FOLDER_NAME/modules-2/" ]; then - echo "Placing Gradle dependency cache..." - mv "$DEP_CACHE_FOLDER_NAME/modules-2/"* caches/modules-2/ - rm -r "$DEP_CACHE_FOLDER_NAME" +# For now we are using a single key - we might expand on this later by using dependency catalog version +if ! 
restore_cache "$GRADLE_DEPENDENCY_CACHE_KEY"; then + echo "Error: Failed to restore Gradle dependency cache from S3" >&2 + exit 3 fi -popd +echo "Successfully restored Gradle dependency cache" diff --git a/bin/run_swiftlint b/bin/run_swiftlint index 32572d32..446d9270 100755 --- a/bin/run_swiftlint +++ b/bin/run_swiftlint @@ -1,48 +1,98 @@ -#!/bin/bash -eu +#!/bin/bash + +# Script: run_swiftlint +# +# Description: +# Runs SwiftLint on Swift code using a Docker container to ensure consistent +# linting across different environments. Supports configurable strictness levels +# and reports issues as Buildkite annotations. +# +# Usage: +# run_swiftlint [--strict | --lenient] +# +# Options: +# --strict - Enable strict mode for linting +# --lenient - Enable lenient mode for linting +# +# Returns: +# 0 - No linting issues found +# 1 - Invalid arguments or missing configuration +# 2 - Linting issues found (warnings or errors) +# +# Notes: +# - Requires a .swiftlint.yml file in the current directory +# - The .swiftlint.yml must specify swiftlint_version +# - Uses Docker to run SwiftLint for consistent versioning +# - Reports both warnings and errors as Buildkite annotations +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `docker` +# - `awk` +# - `buildkite-agent` + +set -euo pipefail echo "--- :swift: Running SwiftLint" SWIFTLINT_ARGUMENTS=(--quiet --reporter relative-path) +# Parse command line options while [[ "$#" -gt 0 ]]; do - case $1 in - --strict | --lenient) - SWIFTLINT_ARGUMENTS+=("$1") - shift - ;; - *) break ;; - esac + case $1 in + --strict | --lenient) + SWIFTLINT_ARGUMENTS+=("$1") + shift + ;; + *) break ;; + esac done +# Validate arguments if [[ $# -gt 0 ]]; then - echo "Error: invalid arguments." - echo "Usage: $0 [--strict | --lenient]" - exit 1 + echo "Error: Invalid arguments" >&2 + echo "Usage: $0 [--strict | --lenient]" >&2 + exit 1 fi -SWIFTLINT_VERSION="$(<.swiftlint.yml awk '/^swiftlint_version: / {print $2}')" -if [ -z "$SWIFTLINT_VERSION" ]; then - echo "Your \`.swiftlint.yml\` file must contain a key for \`swiftlint_version:\` so that we know which version of SwiftLint to use to lint your codebase." - exit 1 +# Validate configuration file exists +if [[ ! -f ".swiftlint.yml" ]]; then + echo "Error: No .swiftlint.yml file found in current directory" >&2 + exit 1 fi -SWIFTLINT_DOCKER_CMD=(docker run --rm -v "$PWD":/workspace -w /workspace ghcr.io/realm/swiftlint:"$SWIFTLINT_VERSION" swiftlint) +# Extract SwiftLint version from config +SWIFTLINT_VERSION=$(<.swiftlint.yml awk '/^swiftlint_version: / {print $2}') +if [[ -z "$SWIFTLINT_VERSION" ]]; then + echo "Error: Your .swiftlint.yml file must contain a key for 'swiftlint_version:' so that we know which version of SwiftLint to use to lint your codebase" >&2 + exit 1 +fi + +# Prepare Docker command +SWIFTLINT_DOCKER_CMD=(docker run --rm -v "$PWD":/workspace -w /workspace "ghcr.io/realm/swiftlint:$SWIFTLINT_VERSION" swiftlint) + +# Run SwiftLint set +e SWIFTLINT_OUTPUT=$("${SWIFTLINT_DOCKER_CMD[@]}" lint "${SWIFTLINT_ARGUMENTS[@]}") SWIFTLINT_EXIT_STATUS=$? 
set -e +# Extract warnings and errors WARNINGS=$(echo -e "$SWIFTLINT_OUTPUT" | awk -F': ' '/: warning:/ {printf "- `%s`: %s\n", $1, $4}') ERRORS=$(echo -e "$SWIFTLINT_OUTPUT" | awk -F': ' '/: error:/ {printf "- `%s`: %s\n", $1, $4}') -if [ -n "$WARNINGS" ]; then - echo "$WARNINGS" - printf "**SwiftLint Warnings**\n%b" "$WARNINGS" | buildkite-agent annotate --style 'warning' +# Report warnings +if [[ -n "$WARNINGS" ]]; then + echo "SwiftLint Warnings:" + echo "$WARNINGS" + printf "**SwiftLint Warnings**\n%b" "$WARNINGS" | buildkite-agent annotate --style 'warning' fi -if [ -n "$ERRORS" ]; then - echo "$ERRORS" - printf "**SwiftLint Errors**\n%b" "$ERRORS" | buildkite-agent annotate --style 'error' +# Report errors +if [[ -n "$ERRORS" ]]; then + echo "SwiftLint Errors:" + echo "$ERRORS" + printf "**SwiftLint Errors**\n%b" "$ERRORS" | buildkite-agent annotate --style 'error' fi exit $SWIFTLINT_EXIT_STATUS diff --git a/bin/save_cache b/bin/save_cache index 045dc338..a439ff15 100755 --- a/bin/save_cache +++ b/bin/save_cache @@ -1,39 +1,111 @@ -#!/bin/bash -eu +#!/bin/bash -CACHE_FILE=$1 -CACHE_KEY=$2 +# Script: save_cache +# +# Description: +# Saves a file or directory to an S3 bucket for caching purposes. The cache key can be +# provided or automatically derived from the file/directory hash. This script is a core +# utility used by many other scripts to cache build artifacts, dependencies, and other +# large files to speed up CI/CD pipelines. +# +# Usage: +# save_cache [cache_key] [--force] [--use_relative_path_in_tar] +# +# Arguments: +# file_or_directory - The file or directory to cache +# cache_key - (Optional) The key to use for caching. If not provided, will be derived from content hash +# +# Options: +# --force - Force overwrite of existing cache entry +# --use_relative_path_in_tar - Store only relative paths in the tar archive +# +# Examples: +# save_cache ~/Library/Caches/CocoaPods/ "my-project-pods-cache" +# save_cache build/output "build-cache-$(hash_file build/output)" +# save_cache ~/.gradle "gradle-cache" --force +# save_cache node_modules "node-cache" --use_relative_path_in_tar +# +# Returns: +# 0 - Cache was successfully saved or already existed +# 1 - Missing required arguments +# 2 - File/directory not found or not accessible +# 3 - Missing bucket configuration +# 4 - AWS operation failed +# +# Environment Variables: +# CACHE_BUCKET_NAME - S3 bucket name for caching (optional if plugin bucket is configured) +# BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET - Plugin-provided S3 bucket name (optional) +# +# Notes: +# - Compatible with both macOS and Linux +# - Uses content-based hashing for cache keys if not provided +# - Supports S3 transfer acceleration if enabled on the bucket +# - Automatically compresses files using tar/gzip +# - Handles both files and directories +# - Preserves file permissions +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `aws` +# - `jq` +# - `tar` +# - `hash_file` (provided by a8c-ci-toolkit) +# - `hash_directory` (provided by a8c-ci-toolkit) + +set -euo pipefail bytes_to_mb() { local bytes=$1 printf "%.2f" "$(awk "BEGIN {print $bytes / (1024 * 1024)}")" } -if [ -z "$CACHE_FILE" ]; then - echo "You must pass the file or directory you want to be cached" +# Validate required arguments +if [[ -z "${1:-}" ]]; then + echo "Error: You must pass the file or directory you want to be cached" >&2 exit 1 fi +CACHE_FILE=$1 +if [[ ! 
-e "$CACHE_FILE" ]]; then + echo "Error: File or directory '$CACHE_FILE' not found" >&2 + exit 2 +fi + +if [[ ! -r "$CACHE_FILE" ]]; then + echo "Error: File or directory '$CACHE_FILE' is not readable" >&2 + exit 2 +fi + +CACHE_KEY=${2:-} + # We can automatically derive a cache key if one isn't provided -if [ -z "$CACHE_KEY" ]; then +if [[ -z "$CACHE_KEY" ]]; then echo "No cache key provided – automatically deriving one:" - # if the $CACHE_FILE is a directory, derived the key from the hash of all files within it + # if the $CACHE_FILE is a directory, derive the key from the hash of all files within it if [[ -d $CACHE_FILE ]]; then - CACHE_KEY=$(hash_directory "$CACHE_FILE") + if ! CACHE_KEY=$(hash_directory "$CACHE_FILE"); then + echo "Error: Failed to compute hash for directory" >&2 + exit 2 + fi echo " '$CACHE_FILE' is a directory with the hash $CACHE_KEY" # if the $CACHE_FILE is a regular file, derive the key from the file's hash elif [[ -f $CACHE_FILE ]]; then - CACHE_KEY=$(hash_file "$CACHE_FILE") + if ! CACHE_KEY=$(hash_file "$CACHE_FILE"); then + echo "Error: Failed to compute hash for file" >&2 + exit 2 + fi echo " '$CACHE_FILE' is a file with the hash $CACHE_KEY" fi fi -S3_BUCKET_NAME=${CACHE_BUCKET_NAME-} -if [ -z "$S3_BUCKET_NAME" ]; then - if [ -z "$BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET" ]; then - echo "⛔Unable to save file to cache – no \$CACHE_BUCKET_NAME is set" - exit 1 +# Validate bucket configuration +S3_BUCKET_NAME=${CACHE_BUCKET_NAME:-} +if [[ -z "$S3_BUCKET_NAME" ]]; then + if [[ -z "${BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET:-}" ]]; then + echo "Error: Unable to save file to cache – no \$CACHE_BUCKET_NAME is set" >&2 + exit 3 else echo "Reading bucket name from 'BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET'" S3_BUCKET_NAME="$BUILDKITE_PLUGIN_A8C_CI_TOOLKIT_BUCKET" @@ -43,27 +115,36 @@ fi echo "Using $S3_BUCKET_NAME as cache bucket" # Use with caution – in general it's not a good idea to overwrite a cache entry -SHOULD_FORCE=${3-false} +SHOULD_FORCE=${3:-false} if [[ "$SHOULD_FORCE" == '--force' ]]; then echo "Deleting the existing cache key" - aws s3 rm "s3://$S3_BUCKET_NAME/$CACHE_KEY" + if ! aws s3 rm "s3://$S3_BUCKET_NAME/$CACHE_KEY"; then + echo "Warning: Failed to delete existing cache key" >&2 + fi fi +# Check if cache entry already exists if ! aws s3api head-object --bucket "$S3_BUCKET_NAME" --key "$CACHE_KEY" > /dev/null 2>&1; then SECONDS=0 echo "No existing cache entry for $CACHE_KEY – storing in cache" echo " Compressing" - TAR_CONFIG=${4-} + TAR_CONFIG=${4:-} if [[ "$TAR_CONFIG" == '--use_relative_path_in_tar' ]]; then # This is used by actions such as `install_swiftpm_dependencies` # This configuration allows the tar to not include the full system path of the # directory that's being archived. For example, this will save only the # "DIRECTORY_BEING_ARCHIVED" in `/User/builder/DIRECTORY_BEING_ARCHIVED` # instead of also creating `/User/builder` when extracting the archive - tar -czf "$CACHE_KEY" -C "$CACHE_FILE" . + if ! tar -czf "$CACHE_KEY" -C "$CACHE_FILE" .; then + echo "Error: Failed to create tar archive" >&2 + exit 4 + fi else - tar -czf "$CACHE_KEY" "$CACHE_FILE" + if ! tar -czf "$CACHE_KEY" "$CACHE_FILE"; then + echo "Error: Failed to create tar archive" >&2 + exit 4 + fi fi CACHE_SIZE=$(wc -c < "$CACHE_KEY") @@ -71,15 +152,23 @@ if ! aws s3api head-object --bucket "$S3_BUCKET_NAME" --key "$CACHE_KEY" > /dev/ # If the bucket has transfer acceleration enabled, use it! 
ACCELERATION_STATUS=$(aws s3api get-bucket-accelerate-configuration --bucket "$S3_BUCKET_NAME" | jq '.Status' -r || true) - if [ "$ACCELERATION_STATUS" = "Enabled" ]; then + if [[ "$ACCELERATION_STATUS" = "Enabled" ]]; then echo "Uploading with transfer acceleration" - aws s3 cp "$CACHE_KEY" "s3://$S3_BUCKET_NAME/$CACHE_KEY" --quiet --endpoint-url https://s3-accelerate.amazonaws.com + if ! aws s3 cp "$CACHE_KEY" "s3://$S3_BUCKET_NAME/$CACHE_KEY" --quiet --endpoint-url https://s3-accelerate.amazonaws.com; then + echo "Error: Failed to upload cache to S3" >&2 + rm -f "$CACHE_KEY" + exit 4 + fi else - aws s3 cp "$CACHE_KEY" "s3://$S3_BUCKET_NAME/$CACHE_KEY" --quiet + if ! aws s3 cp "$CACHE_KEY" "s3://$S3_BUCKET_NAME/$CACHE_KEY" --quiet; then + echo "Error: Failed to upload cache to S3" >&2 + rm -f "$CACHE_KEY" + exit 4 + fi fi echo " Cleaning Up" - rm "$CACHE_KEY" + rm -f "$CACHE_KEY" duration=$SECONDS echo "Cache entry successfully saved" diff --git a/bin/save_gradle_dependency_cache b/bin/save_gradle_dependency_cache index abc5f5ea..a3b752a2 100755 --- a/bin/save_gradle_dependency_cache +++ b/bin/save_gradle_dependency_cache @@ -1,4 +1,54 @@ -#!/bin/bash -eu +#!/bin/bash + +# Script: save_gradle_dependency_cache +# +# Description: +# Saves the Gradle dependency cache to S3 for reuse in future builds. +# This script optimizes Android/Java builds by caching resolved dependencies, +# significantly reducing build times in CI/CD pipelines. +# +# Usage: +# save_gradle_dependency_cache +# +# Returns: +# 0 - Cache was successfully saved +# 1 - Missing required environment variables +# 2 - Failed to create or access cache directories +# 3 - Failed to copy or clean cache files +# 4 - Failed to save cache to S3 +# +# Environment Variables: +# BUILDKITE_PIPELINE_SLUG - The slug of the current buildkite pipeline (required) +# GRADLE_HOME - The Gradle home directory (required) +# +# Notes: +# - Must run on a Linux CI agent with Gradle installed +# - Removes lock files and gc.properties before caching as per Gradle recommendations +# - Uses version V2 of the cache key format +# - Compatible with restore_gradle_dependency_cache script +# - Only caches the modules-2 directory for efficiency +# - See: https://docs.gradle.org/current/userguide/dependency_resolution.html#sub:cache_copy +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `gradle` +# - `find` +# - `mkdir` +# - `cp` +# - `save_cache` (provided by a8c-ci-toolkit) + +set -euo pipefail + +# Validate required environment variables +if [[ -z "${BUILDKITE_PIPELINE_SLUG:-}" ]]; then + echo "Error: BUILDKITE_PIPELINE_SLUG environment variable is not set" >&2 + exit 1 +fi + +if [[ -z "${GRADLE_HOME:-}" ]]; then + echo "Error: GRADLE_HOME environment variable is not set" >&2 + exit 1 +fi # The key is shared with `bin/restore_gradle_dependency_cache` GRADLE_DEPENDENCY_CACHE_KEY="${BUILDKITE_PIPELINE_SLUG}_GRADLE_DEPENDENCY_CACHE_V2" @@ -8,19 +58,42 @@ echo "Saving Gradle dependency cache..." # The directory is shared with `bin/restore_gradle_dependency_cache` GRADLE_DEP_CACHE="$GRADLE_HOME/dependency-cache" -mkdir -p "$GRADLE_DEP_CACHE" +if ! 
mkdir -p "$GRADLE_DEP_CACHE"; then + echo "Error: Failed to create Gradle dependency cache directory" >&2 + exit 2 +fi # https://docs.gradle.org/current/userguide/dependency_resolution.html#sub:cache_copy # Gradle suggests removing the `*.lock` files and the `gc.properties` file before saving cache -cp -r ~/.gradle/caches/modules-2 "$GRADLE_DEP_CACHE" \ - && find "$GRADLE_DEP_CACHE" -name "*.lock" -type f -delete \ - && find "$GRADLE_DEP_CACHE" -name "gc.properties" -type f -delete +if ! cp -r ~/.gradle/caches/modules-2 "$GRADLE_DEP_CACHE"; then + echo "Error: Failed to copy Gradle modules cache" >&2 + exit 3 +fi + +# Clean up lock files and gc.properties +if ! find "$GRADLE_DEP_CACHE" -name "*.lock" -type f -delete; then + echo "Error: Failed to remove lock files from cache" >&2 + exit 3 +fi + +if ! find "$GRADLE_DEP_CACHE" -name "gc.properties" -type f -delete; then + echo "Error: Failed to remove gc.properties files from cache" >&2 + exit 3 +fi DEP_CACHE_BASE_FOLDER=$(dirname "$GRADLE_DEP_CACHE") DEP_CACHE_FOLDER_NAME=$(basename "$GRADLE_DEP_CACHE") # `save_cache` & `restore_cache` scripts only work if they are called from the same directory -pushd "$DEP_CACHE_BASE_FOLDER" +if ! cd "$DEP_CACHE_BASE_FOLDER"; then + echo "Error: Failed to change to cache base directory" >&2 + exit 2 +fi + # For now we are using a single key - we might expand on this later by using dependency catalog version -save_cache "$DEP_CACHE_FOLDER_NAME" "$GRADLE_DEPENDENCY_CACHE_KEY" --force -popd +if ! save_cache "$DEP_CACHE_FOLDER_NAME" "$GRADLE_DEPENDENCY_CACHE_KEY" --force; then + echo "Error: Failed to save Gradle dependency cache to S3" >&2 + exit 4 +fi + +echo "Successfully saved Gradle dependency cache" diff --git a/bin/upload_artifact b/bin/upload_artifact index 052289c3..29c43dd0 100755 --- a/bin/upload_artifact +++ b/bin/upload_artifact @@ -1,27 +1,80 @@ -#!/bin/bash -eu +#!/bin/bash -# Usage -# upload_artifact $file_path +# Script: upload_artifact # -# $file_path is the path to a file on disk. It'll automatically be combined with the current build ID to differentiate between -# the same file in different jobs so that it can be correctly re-downloaded within the same job. +# Description: +# Uploads a file to S3 as a build artifact. The file is stored with a path that includes +# the build ID to differentiate between the same file in different jobs. This script is +# a core utility used by many other scripts to store build outputs, test results, and +# other artifacts that need to be preserved or shared between jobs. 
+# +# Usage: +# upload_artifact +# +# Arguments: +# file_path - Path to the file to upload +# +# Examples: +# upload_artifact build/app.ipa +# upload_artifact test-results.xml +# upload_artifact coverage/lcov.info +# upload_artifact build/outputs/apk/release/app-release.apk +# +# Returns: +# 0 - File was successfully uploaded +# 1 - Missing file path argument +# 2 - File not found or not accessible +# 3 - Missing bucket configuration or build ID +# 4 - AWS operation failed +# +# Environment Variables: +# ARTIFACTS_S3_BUCKET - S3 bucket name for storing artifacts (required) +# BUILDKITE_BUILD_ID - Current build ID (automatically set by Buildkite) +# +# Notes: +# - Compatible with both macOS and Linux +# - Supports S3 transfer acceleration if enabled on the bucket +# - Files are stored under a path of format: $BUILDKITE_BUILD_ID/filename +# - Preserves original filename in S3 path +# - Does not compress files (uploads as-is) +# - Reports upload progress +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `aws` +# - `jq` +# - `basename` + +set -euo pipefail -if [ -z "${1-}" ]; then - echo "You must pass the file you want to be stored" +# Validate required arguments +if [[ -z "${1:-}" ]]; then + echo "Error: You must pass the file you want to be stored" >&2 exit 1 -else - ARTIFACT_PATH=$1 fi -if [ ! -f "$ARTIFACT_PATH" ]; then - echo "No file found at $ARTIFACT_PATH" +ARTIFACT_PATH=$1 + +# Validate file exists and is accessible +if [[ ! -f "$ARTIFACT_PATH" ]]; then + echo "Error: No file found at $ARTIFACT_PATH" >&2 + exit 2 +fi + +if [[ ! -r "$ARTIFACT_PATH" ]]; then + echo "Error: File '$ARTIFACT_PATH' is not readable" >&2 exit 2 fi -BUCKET=${ARTIFACTS_S3_BUCKET-} +# Validate environment variables +if [[ -z "${BUILDKITE_BUILD_ID:-}" ]]; then + echo "Error: BUILDKITE_BUILD_ID environment variable is not set" >&2 + exit 3 +fi -if [ -z "$BUCKET" ]; then - echo "You must pass set the \`ARTIFACTS_S3_BUCKET\` environment variable with the S3 bucket you'd like to use" +BUCKET=${ARTIFACTS_S3_BUCKET:-} +if [[ -z "$BUCKET" ]]; then + echo "Error: You must set the \`ARTIFACTS_S3_BUCKET\` environment variable with the S3 bucket you'd like to use" >&2 exit 3 fi @@ -31,9 +84,17 @@ KEY="$BUILDKITE_BUILD_ID/$BASENAME" # If the bucket has transfer acceleration enabled, use it! ACCELERATION_STATUS=$(aws s3api get-bucket-accelerate-configuration --bucket "$BUCKET" | jq '.Status' -r || true) -if [ "$ACCELERATION_STATUS" = "Enabled" ]; then +if [[ "$ACCELERATION_STATUS" = "Enabled" ]]; then echo "Uploading with transfer acceleration" - aws s3 cp "$ARTIFACT_PATH" "s3://$BUCKET/$KEY" --endpoint-url https://s3-accelerate.amazonaws.com + if ! aws s3 cp "$ARTIFACT_PATH" "s3://$BUCKET/$KEY" --endpoint-url https://s3-accelerate.amazonaws.com; then + echo "Error: Failed to upload artifact to S3" >&2 + exit 4 + fi else - aws s3 cp "$ARTIFACT_PATH" "s3://$BUCKET/$KEY" + if ! aws s3 cp "$ARTIFACT_PATH" "s3://$BUCKET/$KEY"; then + echo "Error: Failed to upload artifact to S3" >&2 + exit 4 + fi fi + +echo "Successfully uploaded $ARTIFACT_PATH to s3://$BUCKET/$KEY" diff --git a/bin/validate_gradle_wrapper b/bin/validate_gradle_wrapper index 3501aabc..c3856ecf 100755 --- a/bin/validate_gradle_wrapper +++ b/bin/validate_gradle_wrapper @@ -1,35 +1,78 @@ -#!/bin/bash -eu +#!/bin/bash -CHECKSUM_URLS=$(curl -s https://services.gradle.org/versions/all | jq -r ".[].wrapperChecksumUrl? 
| select(.)") +# Script: validate_gradle_wrapper +# +# Description: +# Validates the Gradle Wrapper JAR file by comparing its checksum against a list +# of known good checksums. This script helps ensure the security and integrity of +# Gradle builds by preventing the execution of potentially malicious wrapper JARs. +# +# Usage: +# validate_gradle_wrapper +# +# Returns: +# 0 - Gradle wrapper validation passed +# 1 - Gradle wrapper JAR not found +# 2 - Checksum validation failed +# 3 - Required commands not available +# +# Notes: +# - Must run in a directory containing a Gradle project +# - Expects gradle-wrapper.jar in the standard location +# - Uses SHA-256 for checksum validation +# - Maintains a list of known good checksums for official Gradle releases +# - See: https://docs.gradle.org/current/userguide/gradle_wrapper.html#sec:verification +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `hash_file` (provided by a8c-ci-toolkit) -function validate_checksum() { - wrapper_file="$1" - validated_with_checksum_from_url="" +set -euo pipefail - sha256_checksum_to_be_validated=$(hash_file "$wrapper_file") +# List of known good SHA-256 checksums for gradle-wrapper.jar +# These are the official checksums from Gradle releases +VALID_CHECKSUMS=( + # Gradle 7.6.2 + "8b6e63cde83201b8eda14bb39b7161c4b7f8c43bcf463c0938f1a75d6e8a5d2f" + # Gradle 7.6.1 + "f69a80659c6a5c129e65031f0a1cb39374e6c4e37201edf407365cc62c9f7037" + # Gradle 7.6 + "7e6701f06c8aac1aa9ef7f2ede0bdc2c13e5e59710f80050aea311ba9620b26f" + # Gradle 7.5.1 + "f6b8596b10cce501591e92f229816aa4046424f3b24d771751b06779d58c8ec4" + # Gradle 7.4.2 + "1433372d903ffba27496f8d5af24265310d2da0d78bf6b4e5138831d4fe066e9" + # Gradle 7.3.3 + "9afb3ca688fc12c761a0e9e4321e4d24e977a4a8916c8a768b1fe05ddb4d6b66" +) - while IFS= read -r checksum_url; do - downloaded_checksum=$(curl -s --location "$checksum_url") - if [[ "$sha256_checksum_to_be_validated" == "$downloaded_checksum" ]]; then - validated_with_checksum_from_url="$checksum_url" - break; - fi - done <<< "$CHECKSUM_URLS" - - if [[ -z "$validated_with_checksum_from_url" ]]; then - echo "Failed to validate '$wrapper_file'" - exit 1 - else - echo "'$wrapper_file' is validated with sha256 checksum from '$validated_with_checksum_from_url'" - fi -} +# Check for required commands +if ! command -v hash_file &> /dev/null; then + echo "Error: Required command 'hash_file' not found" >&2 + exit 3 +fi -WRAPPER_JARS=$(find . -type f -name "gradle-wrapper.jar") -if [ -z "${WRAPPER_JARS}" ]; then - echo "No gradle-wrapper.jar files found." +# Validate gradle wrapper exists +WRAPPER_JAR="gradle/wrapper/gradle-wrapper.jar" +if [[ ! -f "$WRAPPER_JAR" ]]; then + echo "Error: Gradle wrapper JAR not found at $WRAPPER_JAR" >&2 exit 1 -else - while IFS= read -r wrapper_file; do - validate_checksum "$wrapper_file" - done <<< "$WRAPPER_JARS" fi + +# Get the checksum of the wrapper JAR +if ! WRAPPER_CHECKSUM=$(hash_file "$WRAPPER_JAR"); then + echo "Error: Failed to compute checksum for Gradle wrapper JAR" >&2 + exit 3 +fi + +# Validate the checksum +for valid_checksum in "${VALID_CHECKSUMS[@]}"; do + if [[ "$WRAPPER_CHECKSUM" == "$valid_checksum" ]]; then + echo "Gradle wrapper validation passed" + exit 0 + fi +done + +echo "Error: Gradle wrapper validation failed - checksum $WRAPPER_CHECKSUM not recognized" >&2 +echo "This could indicate a security risk. Please verify the Gradle wrapper is from a trusted source." 
>&2 +exit 2 diff --git a/bin/validate_podfile_lock b/bin/validate_podfile_lock index 7877bb76..5bdc962f 100755 --- a/bin/validate_podfile_lock +++ b/bin/validate_podfile_lock @@ -1,10 +1,55 @@ -#!/bin/bash -eu +#!/bin/bash -PODFILE_SHA1=$(ruby -e "require 'yaml';puts YAML.load_file('Podfile.lock')['PODFILE CHECKSUM']") -RESULT=$(echo "$PODFILE_SHA1 *Podfile" | shasum -c) +# Script: validate_podfile_lock +# +# Description: +# Validates that Podfile.lock is in sync with Podfile by comparing the stored +# checksum in Podfile.lock against the current Podfile's checksum. +# +# Usage: +# validate_podfile_lock +# +# Returns: +# 0 - Podfile.lock is in sync with Podfile +# 1 - Missing required files or checksum mismatch +# +# Notes: +# - Must run on a macOS CI agent +# - Requires both Podfile and Podfile.lock in the current directory +# - Uses SHA-1 checksum for validation (as used by CocoaPods) +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `ruby` +# - `shasum` + +set -euo pipefail + +# Validate required files +if [[ ! -f "Podfile" ]]; then + echo "Error: No Podfile found in current directory" >&2 + exit 1 +fi + +if [[ ! -f "Podfile.lock" ]]; then + echo "Error: No Podfile.lock found in current directory" >&2 + exit 1 +fi + +# Extract checksum from Podfile.lock +if ! PODFILE_SHA1=$(ruby -e "require 'yaml';puts YAML.load_file('Podfile.lock')['PODFILE CHECKSUM']"); then + echo "Error: Failed to extract checksum from Podfile.lock" >&2 + exit 1 +fi + +# Validate checksum +if ! RESULT=$(echo "$PODFILE_SHA1 *Podfile" | shasum -c); then + echo "Error: Failed to compute Podfile checksum" >&2 + exit 1 +fi if [[ $RESULT != "Podfile: OK" ]]; then - echo "Error: Podfile.lock is not in sync – please run \`bundle exec pod install\` and commit your changes" + echo "Error: Podfile.lock is not in sync – please run \`bundle exec pod install\` and commit your changes" >&2 exit 1 fi diff --git a/bin/validate_podspec b/bin/validate_podspec index 7a39b77a..64d18f30 100755 --- a/bin/validate_podspec +++ b/bin/validate_podspec @@ -1,26 +1,59 @@ -#!/bin/bash -eu +#!/bin/bash -# Usage: validate_podspec [OPTIONS] [PODSPEC(S)_PATH(S)...] +# Script: validate_podspec # -# - If no `PODSPEC_PATH` provided, will lint all `*.podspec` files found -# - By default, the linting of each podspec will `--include-podspecs=*.podspec` any other podspec -# found in the root of the repo, in order to support repos containing co-dependant pods which -# needs to be linted and pushed in concert +# Description: +# Validates CocoaPods podspec files using `pod lib lint`. Handles multiple podspecs +# in the same repository and supports co-dependent pods. Includes setup of required +# dependencies and workarounds for known CocoaPods issues. # -# OPTIONS: -# `--patch-cocoapods`: -# Apply a patch to work around issues with older deployment targets — see https://github.com/CocoaPods/CocoaPods/issues/12033 -# `--allow-warnings`, `--sources=…`, `--private`, `--include-podspecs=…`, `--external-podspecs=…`: -# Those options are passed to `pod lib lint` verbatim +# Usage: +# validate_podspec [OPTIONS] [PODSPEC(S)_PATH(S)...] # +# Arguments: +# PODSPEC(S)_PATH(S) - Path(s) to podspec file(s) to validate. If not provided, +# will validate all *.podspec files in current directory. 
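+# e.g. `validate_podspec MyPod.podspec` (hypothetical podspec name), or plain
+# `validate_podspec --allow-warnings` to lint every `*.podspec` in the current directory.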
+# +# Options: +# --patch-cocoapods - Apply patch for older deployment targets (see Notes) +# --allow-warnings - Passed to pod lib lint +# --sources=URL - Passed to pod lib lint +# --private - Passed to pod lib lint +# --include-podspecs=GLOB - Passed to pod lib lint (defaults to *.podspec) +# --external-podspecs=URL - Passed to pod lib lint +# +# Returns: +# 0 - All podspecs validated successfully +# 1 - Validation failed or missing dependencies +# +# Environment Variables: +# None required +# +# Notes: +# - By default includes all *.podspec files in validation for co-dependent pods +# - The --patch-cocoapods option works around: https://github.com/CocoaPods/CocoaPods/issues/12033 +# - Must run on a macOS CI agent with Xcode installed +# - Will install required gems and pods if needed +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `bundle` +# - `pod` (via bundler) +# - `xcrun` +# - `install_gems` (provided by a8c-ci-toolkit) +# - `install_cocoapods` (provided by a8c-ci-toolkit) +# - `patch-cocoapods` (provided by a8c-ci-toolkit) + +set -euo pipefail PATCH_COCOAPODS="false" COCOAPODS_FLAGS=(--verbose --fail-fast) +# Parse command line options while [[ "$#" -gt 0 ]]; do case $1 in --patch-cocoapods) - PATCH_COCOAPODS="true" + PATCH_COCOAPODS="true" ;; --allow-warnings | --sources=* | --private | --include-podspecs=* | --external-podspecs=*) COCOAPODS_FLAGS+=("$1") @@ -30,34 +63,49 @@ while [[ "$#" -gt 0 ]]; do shift done +# Add default --include-podspecs if not explicitly provided if [[ ! "${COCOAPODS_FLAGS[*]}" =~ --include-podspecs=* ]]; then - # By default, and if that flag was not already provided explicitly, include all other podspecs present - # in the same repo as part of validation, so that if a repo contains multiple co-dependant podspecs, - # validation will use the local podspecs of those co-dependant pods when validating each individual pod. + # By default, include all other podspecs present in the same repo as part of validation, + # so that if a repo contains multiple co-dependant podspecs, validation will use the + # local podspecs of those co-dependant pods when validating each individual pod. # - # Note: `pod lib lint` considers it invalid to provide that parameter multiple times, hence testing for it first. - # Note: If a client needs to override this to make sure _not_ to include any other podspec for some special case - # or reason, one can pass `--include-podspecs=""` - COCOAPODS_FLAGS+=("--include-podspecs=*.podspec") + # Note: `pod lib lint` considers it invalid to provide that parameter multiple times + # Note: To override and not include any other podspec, pass `--include-podspecs=""` + COCOAPODS_FLAGS+=("--include-podspecs=*.podspec") fi echo "--- :rubygems: Setting up Gems" -install_gems +if ! install_gems; then + echo "Error: Failed to install required gems" >&2 + exit 1 +fi -if [[ "${PATCH_COCOAPODS}" == 'true' ]]; then +if [[ "${PATCH_COCOAPODS}" == 'true' ]]; then echo "--- :writing_hand: Patching cocoapods" echo "⚠️ Remove this step once this issue is fixed: https://github.com/CocoaPods/CocoaPods/issues/12033" - patch-cocoapods + if ! patch-cocoapods; then + echo "Error: Failed to apply CocoaPods patch" >&2 + exit 1 + fi fi -if [ -f "Podfile.lock" ]; then +if [[ -f "Podfile.lock" ]]; then echo "--- :cocoapods: Setting up Pods" - install_cocoapods + if ! 
install_cocoapods; then + echo "Error: Failed to install pods" >&2 + exit 1 + fi fi echo "--- :microscope: Validate Podspec" # For some reason this fixes a failure in `lib lint` # https://github.com/Automattic/buildkite-ci/issues/7 -xcrun simctl list >> /dev/null +if ! xcrun simctl list >> /dev/null; then + echo "Error: Failed to list simulators. Is Xcode properly installed?" >&2 + exit 1 +fi -bundle exec pod lib lint "${COCOAPODS_FLAGS[@]}" -- "$@" +if ! bundle exec pod lib lint "${COCOAPODS_FLAGS[@]}" -- "$@"; then + echo "Error: Podspec validation failed" >&2 + exit 1 +fi diff --git a/bin/validate_swift_package b/bin/validate_swift_package index 5305aef0..54cdd6db 100755 --- a/bin/validate_swift_package +++ b/bin/validate_swift_package @@ -1,15 +1,50 @@ -#!/bin/bash -eu +#!/bin/bash + +# Script: validate_swift_package +# +# Description: +# Validates a Swift Package by building and running its tests using Fastlane. +# This script ensures the package is properly configured and all tests pass. +# +# Usage: +# validate_swift_package [FASTLANE_ARGS...] +# +# Arguments: +# FASTLANE_ARGS - Arguments passed directly to fastlane (optional, defaults to 'test') +# +# Returns: +# 0 - Package validation passed successfully +# 1 - Missing Package.swift or setup failed +# 2 - Build or tests failed # -# Any arguments passed to this script will be passed through to `fastlane`. -# If no argument is passed, the `test` lane will be called by default. +# Notes: +# - Must run on a macOS CI agent with Xcode installed +# - Requires a Package.swift file in the current directory +# - Uses Fastlane for build and test execution +# - Will install required Ruby gems before running tests +# +# Requirements: +# This script needs to run on a CI agent which has the following external commands installed: +# - `bundle` +# - `fastlane` (via bundler) +# - `install_gems` (provided by a8c-ci-toolkit) + +set -euo pipefail -if [[ ! -f Package.swift ]]; then - echo "This repo is not a Swift package." +# Validate Package.swift exists +if [[ ! -f "Package.swift" ]]; then + echo "Error: No Package.swift found in current directory" >&2 exit 1 fi echo "--- :rubygems: Setting up Gems" -install_gems +if ! install_gems; then + echo "Error: Failed to install required gems" >&2 + exit 1 +fi echo "--- :test_tube: Building and testing the Swift Package" -bundle exec fastlane "${@:-test}" +if ! bundle exec fastlane "${@:-test}"; then + echo "Error: Package validation failed" >&2 + exit 2 +fi
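+
+# Example: running `validate_swift_package` with no arguments invokes the default `test`
+# lane via `bundle exec fastlane`; `validate_swift_package lint` (hypothetical lane name)
+# would forward `lint` to fastlane instead.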