diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2b327e99..6ab52460 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -93,6 +93,15 @@ updates: - "minor" - "patch" + # AWS actions (OIDC, credentials, S3) + aws: + patterns: + - "aws-actions/*" + update-types: + - "major" + - "minor" + - "patch" + # Miscellaneous third-party utilities utilities: patterns: diff --git a/.github/workflows/s3-upload.yml b/.github/workflows/s3-upload.yml new file mode 100644 index 00000000..a9dbf679 --- /dev/null +++ b/.github/workflows/s3-upload.yml @@ -0,0 +1,267 @@ +name: "S3 Upload" + +# Reusable workflow for uploading files to S3 with environment-based folder routing. +# +# Features: +# - Automatic environment detection from tag suffix (beta → development, rc → staging, release → production) +# - Supports glob patterns for flexible file selection +# - AWS authentication via OIDC (IAM Role) +# - Optional custom S3 prefix within environment folder +# - dry_run mode for previewing uploads without applying +# +# Examples: +# # Upload Casdoor init data +# uses: LerianStudio/github-actions-shared-workflows/.github/workflows/s3-upload.yml@v1.0.0 +# with: +# s3_bucket: "lerian-casdoor-init-data" +# file_pattern: "init/casdoor/init_data*.json" +# secrets: +# AWS_ROLE_ARN: ${{ secrets.AWS_INIT_DATA_ROLE_ARN }} +# +# # Upload migration files with custom prefix +# uses: LerianStudio/github-actions-shared-workflows/.github/workflows/s3-upload.yml@v1.0.0 +# with: +# s3_bucket: "lerian-migration-files" +# file_pattern: "init/casdoor-migrations/migrations/*.sql" +# s3_prefix: "casdoor-migrations" +# secrets: +# AWS_ROLE_ARN: ${{ secrets.AWS_MIGRATIONS_ROLE_ARN }} + +on: + workflow_call: + inputs: + runner_type: + description: 'Runner to use for the workflow' + type: string + default: 'blacksmith-4vcpu-ubuntu-2404' + s3_bucket: + description: 'S3 bucket name (without s3:// prefix)' + type: string + required: true + file_pattern: + description: 'Glob pattern for 
files to upload (e.g., "init/casdoor/init_data*.json", "migrations/*.sql")' + type: string + required: true + s3_prefix: + description: 'Optional prefix inside the environment folder (e.g., "casdoor-migrations" → development/casdoor-migrations/)' + type: string + default: '' + aws_region: + description: 'AWS region' + type: string + default: 'us-east-2' + environment_detection: + description: 'Environment detection strategy: tag_suffix (auto from tag) or manual' + type: string + default: 'tag_suffix' + manual_environment: + description: 'Manually specify environment (development/staging/production) - only used if environment_detection is manual' + type: string + required: false + strip_prefix: + description: 'Remove this prefix from file paths before uploading (e.g., "components/onboarding/migrations" strips that from the source path)' + type: string + default: '' + flatten: + description: 'Upload files without preserving directory structure (only filenames)' + type: boolean + default: true + dry_run: + description: 'Preview changes without applying them' + type: boolean + required: false + default: false + secrets: + AWS_ROLE_ARN: + description: 'ARN of the IAM role to assume for S3 access' + required: true + workflow_dispatch: + inputs: + runner_type: + description: 'Runner to use for the workflow' + type: string + default: 'blacksmith-4vcpu-ubuntu-2404' + s3_bucket: + description: 'S3 bucket name (without s3:// prefix)' + type: string + required: true + file_pattern: + description: 'Glob pattern for files to upload (e.g., "init/casdoor/init_data*.json", "migrations/*.sql")' + type: string + required: true + s3_prefix: + description: 'Optional prefix inside the environment folder' + type: string + default: '' + aws_region: + description: 'AWS region' + type: string + default: 'us-east-2' + environment_detection: + description: 'Environment detection strategy: tag_suffix (auto from tag) or manual' + type: string + default: 'tag_suffix' + manual_environment: + 
+        description: 'Manually specify environment (development/staging/production)'
+        type: string
+        required: false
+      strip_prefix:
+        description: 'Remove this prefix from file paths before uploading'
+        type: string
+        default: ''
+      flatten:
+        description: 'Upload files without preserving directory structure (only filenames)'
+        type: boolean
+        default: true
+      dry_run:
+        description: 'Dry run — preview uploads without applying them'
+        type: boolean
+        default: false
+
+permissions:
+  id-token: write
+  contents: read
+
+jobs:
+  upload:
+    runs-on: ${{ inputs.runner_type }}
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Determine environment
+        id: env
+        env:
+          INPUT_ENV_DETECTION: ${{ inputs.environment_detection }}
+          INPUT_MANUAL_ENV: ${{ inputs.manual_environment }}
+        run: |
+          set -euo pipefail
+
+          if [[ "$INPUT_ENV_DETECTION" == "manual" ]]; then
+            case "$INPUT_MANUAL_ENV" in
+              development|staging|production)
+                FOLDER="$INPUT_MANUAL_ENV"
+                ;;
+              *)
+                echo "::error::manual_environment must be one of: development, staging, production (got '${INPUT_MANUAL_ENV}')"
+                exit 1
+                ;;
+            esac
+          elif [[ "$INPUT_ENV_DETECTION" == "tag_suffix" ]]; then
+            REF="${GITHUB_REF#refs/}"
+            # Only derive TAG from actual tag refs. The previous form
+            # (TAG="${GITHUB_REF#refs/tags/}") left TAG equal to the full ref
+            # for branches, so a branch named e.g. "fix-rc-bug" or "my-beta-work"
+            # matched the *-rc*/*-beta* patterns below and uploaded to
+            # staging/development from an arbitrary branch.
+            if [[ "$GITHUB_REF" == refs/tags/* ]]; then
+              TAG="${GITHUB_REF#refs/tags/}"
+            else
+              TAG=""
+            fi
+
+            if [[ "$REF" == heads/develop ]] || [[ "$TAG" == *-beta* ]]; then
+              FOLDER="development"
+            elif [[ "$REF" == heads/release-candidate ]] || [[ "$TAG" == *-rc* ]]; then
+              FOLDER="staging"
+            elif [[ "$REF" == heads/main ]] || [[ "$TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+              FOLDER="production"
+            else
+              echo "⚠️ Ref '$REF' does not match any environment. Skipping upload."
+ FOLDER="" + fi + else + echo "::error::environment_detection must be one of: tag_suffix, manual (got '${INPUT_ENV_DETECTION}')" + exit 1 + fi + + echo "folder=${FOLDER}" >> "$GITHUB_OUTPUT" + [[ -n "$FOLDER" ]] && echo "📁 Environment: ${FOLDER}" || echo "⚠️ No environment matched" + + - name: Configure AWS credentials + if: steps.env.outputs.folder != '' + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_ROLE_ARN }} + aws-region: ${{ inputs.aws_region }} + + - name: Dry run summary + if: steps.env.outputs.folder != '' && inputs.dry_run + env: + BUCKET: ${{ inputs.s3_bucket }} + FOLDER: ${{ steps.env.outputs.folder }} + PREFIX: ${{ inputs.s3_prefix }} + PATTERN: ${{ inputs.file_pattern }} + FLATTEN: ${{ inputs.flatten }} + STRIP_PREFIX: ${{ inputs.strip_prefix }} + run: | + set -euo pipefail + + echo "::notice::DRY RUN — no files will be uploaded" + echo " bucket : ${BUCKET}" + echo " folder : ${FOLDER}" + echo " prefix : ${PREFIX:-}" + echo " strip_prefix : ${STRIP_PREFIX:-}" + echo " pattern : ${PATTERN}" + echo " flatten : ${FLATTEN}" + + if [[ -n "$PREFIX" ]]; then + S3_PATH="s3://${BUCKET}/${FOLDER}/${PREFIX}/" + else + S3_PATH="s3://${BUCKET}/${FOLDER}/" + fi + + shopt -s nullglob + FILE_COUNT=0 + + for file in $PATTERN; do + if [[ "$FLATTEN" == "true" ]]; then + echo " [dry-run] aws s3 cp $file ${S3_PATH}$(basename "$file")" + elif [[ -n "$STRIP_PREFIX" ]]; then + DEST_PATH="${file#"$STRIP_PREFIX"/}" + echo " [dry-run] aws s3 cp $file ${S3_PATH}${DEST_PATH}" + else + echo " [dry-run] aws s3 cp $file ${S3_PATH}${file}" + fi + FILE_COUNT=$((FILE_COUNT + 1)) + done + + if [[ $FILE_COUNT -eq 0 ]]; then + echo "::warning::No files matched pattern: ${PATTERN}" + exit 1 + fi + + echo "::notice::Would upload ${FILE_COUNT} file(s) to ${S3_PATH}" + + - name: Upload files to S3 + if: steps.env.outputs.folder != '' && !inputs.dry_run + env: + BUCKET: ${{ inputs.s3_bucket }} + FOLDER: ${{ steps.env.outputs.folder }} + PREFIX: ${{ 
inputs.s3_prefix }} + PATTERN: ${{ inputs.file_pattern }} + FLATTEN: ${{ inputs.flatten }} + STRIP_PREFIX: ${{ inputs.strip_prefix }} + run: | + set -euo pipefail + + # Build S3 destination path + if [[ -n "$PREFIX" ]]; then + S3_PATH="s3://${BUCKET}/${FOLDER}/${PREFIX}/" + else + S3_PATH="s3://${BUCKET}/${FOLDER}/" + fi + + # Find and upload files + shopt -s nullglob + FILE_COUNT=0 + + for file in $PATTERN; do + if [[ "$FLATTEN" == "true" ]]; then + aws s3 cp "$file" "${S3_PATH}" + elif [[ -n "$STRIP_PREFIX" ]]; then + DEST_PATH="${file#"$STRIP_PREFIX"/}" + aws s3 cp "$file" "${S3_PATH}${DEST_PATH}" + else + aws s3 cp "$file" "${S3_PATH}${file}" + fi + + FILE_COUNT=$((FILE_COUNT + 1)) + done + + if [[ $FILE_COUNT -eq 0 ]]; then + echo "⚠️ No files matched pattern: ${PATTERN}" + exit 1 + fi + + echo "::notice::Uploaded ${FILE_COUNT} file(s) to ${S3_PATH}" diff --git a/docs/s3-upload.md b/docs/s3-upload.md new file mode 100644 index 00000000..f210bcb5 --- /dev/null +++ b/docs/s3-upload.md @@ -0,0 +1,119 @@ + + + + + +
Lerian

s3-upload

+
+Reusable workflow for uploading files to AWS S3 with automatic environment-based folder routing and OIDC authentication.
+
+## What it does
+
+Uploads files matching a glob pattern to an S3 bucket, organized by environment folder. The environment is detected automatically from the git ref/tag or can be set manually.
+
+| Ref / Tag | Environment folder |
+|---|---|
+| `develop` branch or `*-beta*` tag | `development/` |
+| `release-candidate` branch or `*-rc*` tag | `staging/` |
+| `main` branch or `vX.Y.Z` tag | `production/` |
+
+## Inputs
+
+| Input | Type | Required | Default | Description |
+|---|---|:---:|---|---|
+| `runner_type` | `string` | No | `blacksmith-4vcpu-ubuntu-2404` | Runner to use for the workflow |
+| `s3_bucket` | `string` | **Yes** | — | S3 bucket name (without `s3://` prefix) |
+| `file_pattern` | `string` | **Yes** | — | Glob pattern for files to upload |
+| `s3_prefix` | `string` | No | `""` | Optional prefix inside the environment folder |
+| `aws_region` | `string` | No | `us-east-2` | AWS region |
+| `environment_detection` | `string` | No | `tag_suffix` | Detection strategy: `tag_suffix` or `manual` |
+| `manual_environment` | `string` | No | — | Environment override: `development`, `staging`, or `production` |
+| `strip_prefix` | `string` | No | `""` | Prefix removed from file paths before uploading (applies when `flatten` is `false`) |
+| `flatten` | `boolean` | No | `true` | Upload only filenames (discard directory structure) |
+| `dry_run` | `boolean` | No | `false` | Preview uploads without applying them |
+
+## Secrets
+
+| Secret | Required | Description |
+|---|---|---|
+| `AWS_ROLE_ARN` | **Yes** | ARN of the IAM role to assume via OIDC for S3 access |
+
+## Usage
+
+### Upload init data files
+
+```yaml
+jobs:
+  upload:
+    uses: LerianStudio/github-actions-shared-workflows/.github/workflows/s3-upload.yml@v1.0.0
+    with:
+      s3_bucket: "lerian-casdoor-init-data"
+      file_pattern: "init/casdoor/init_data*.json"
+    secrets:
+      AWS_ROLE_ARN: ${{ secrets.AWS_INIT_DATA_ROLE_ARN }}
+```
+
+### Upload migration files with custom prefix
+
+```yaml
+jobs:
+  upload:
uses: LerianStudio/github-actions-shared-workflows/.github/workflows/s3-upload.yml@v1.0.0 + with: + s3_bucket: "lerian-migration-files" + file_pattern: "init/casdoor-migrations/migrations/*.sql" + s3_prefix: "casdoor-migrations" + secrets: + AWS_ROLE_ARN: ${{ secrets.AWS_MIGRATIONS_ROLE_ARN }} +``` + +### Dry run (preview only) + +```yaml +# Use @develop or your feature branch to validate before releasing +jobs: + preview: + uses: LerianStudio/github-actions-shared-workflows/.github/workflows/s3-upload.yml@develop + with: + s3_bucket: "lerian-casdoor-init-data" + file_pattern: "init/casdoor/init_data*.json" + dry_run: true + secrets: + AWS_ROLE_ARN: ${{ secrets.AWS_INIT_DATA_ROLE_ARN }} +``` + +### Manual environment override + +```yaml +jobs: + upload: + uses: LerianStudio/github-actions-shared-workflows/.github/workflows/s3-upload.yml@v1.0.0 + with: + s3_bucket: "lerian-casdoor-init-data" + file_pattern: "init/casdoor/init_data*.json" + environment_detection: "manual" + manual_environment: "staging" + secrets: + AWS_ROLE_ARN: ${{ secrets.AWS_INIT_DATA_ROLE_ARN }} +``` + +### Preserve directory structure + +```yaml +jobs: + upload: + uses: LerianStudio/github-actions-shared-workflows/.github/workflows/s3-upload.yml@v1.0.0 + with: + s3_bucket: "lerian-migration-files" + file_pattern: "init/casdoor-migrations/migrations/*.sql" + flatten: false + secrets: + AWS_ROLE_ARN: ${{ secrets.AWS_MIGRATIONS_ROLE_ARN }} +``` + +## Permissions + +```yaml +permissions: + id-token: write + contents: read +```