Skip to content

Commit 3ac4d79

Browse files
authored
Merge pull request #146 from LerianStudio/main
chore: backmerge
2 parents 0af0229 + 9caaf4c commit 3ac4d79

3 files changed

Lines changed: 395 additions & 0 deletions

File tree

.github/dependabot.yml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -94,6 +94,15 @@ updates:
9494
- "minor"
9595
- "patch"
9696

97+
# AWS actions (OIDC, credentials, S3)
98+
aws:
99+
patterns:
100+
- "aws-actions/*"
101+
update-types:
102+
- "major"
103+
- "minor"
104+
- "patch"
105+
97106
# Miscellaneous third-party utilities
98107
utilities:
99108
patterns:

.github/workflows/s3-upload.yml

Lines changed: 267 additions & 0 deletions
Original file line numberDiff line numberDiff line change
name: "S3 Upload"

# Reusable workflow for uploading files to S3 with environment-based folder routing.
#
# Features:
# - Automatic environment detection from tag suffix (beta → development, rc → staging, release → production)
# - Supports glob patterns for flexible file selection
# - AWS authentication via OIDC (IAM Role)
# - Optional custom S3 prefix within environment folder
# - dry_run mode for previewing uploads without applying
#
# Examples:
#   # Upload Casdoor init data
#   uses: LerianStudio/github-actions-shared-workflows/.github/workflows/s3-upload.yml@v1.0.0
#   with:
#     s3_bucket: "lerian-casdoor-init-data"
#     file_pattern: "init/casdoor/init_data*.json"
#   secrets:
#     AWS_ROLE_ARN: ${{ secrets.AWS_INIT_DATA_ROLE_ARN }}
#
#   # Upload migration files with custom prefix
#   uses: LerianStudio/github-actions-shared-workflows/.github/workflows/s3-upload.yml@v1.0.0
#   with:
#     s3_bucket: "lerian-migration-files"
#     file_pattern: "init/casdoor-migrations/migrations/*.sql"
#     s3_prefix: "casdoor-migrations"
#   secrets:
#     AWS_ROLE_ARN: ${{ secrets.AWS_MIGRATIONS_ROLE_ARN }}

on:
  workflow_call:
    inputs:
      runner_type:
        description: 'Runner to use for the workflow'
        type: string
        default: 'blacksmith-4vcpu-ubuntu-2404'
      s3_bucket:
        description: 'S3 bucket name (without s3:// prefix)'
        type: string
        required: true
      file_pattern:
        description: 'Glob pattern for files to upload (e.g., "init/casdoor/init_data*.json", "migrations/*.sql")'
        type: string
        required: true
      s3_prefix:
        description: 'Optional prefix inside the environment folder (e.g., "casdoor-migrations" → development/casdoor-migrations/)'
        type: string
        default: ''
      aws_region:
        description: 'AWS region'
        type: string
        default: 'us-east-2'
      environment_detection:
        description: 'Environment detection strategy: tag_suffix (auto from tag) or manual'
        type: string
        default: 'tag_suffix'
      manual_environment:
        description: 'Manually specify environment (development/staging/production) - only used if environment_detection is manual'
        type: string
        required: false
      strip_prefix:
        description: 'Remove this prefix from file paths before uploading (e.g., "components/onboarding/migrations" strips that from the source path)'
        type: string
        default: ''
      flatten:
        description: 'Upload files without preserving directory structure (only filenames)'
        type: boolean
        default: true
      dry_run:
        description: 'Preview changes without applying them'
        type: boolean
        required: false
        default: false
    secrets:
      AWS_ROLE_ARN:
        description: 'ARN of the IAM role to assume for S3 access'
        required: true
  workflow_dispatch:
    inputs:
      runner_type:
        description: 'Runner to use for the workflow'
        type: string
        default: 'blacksmith-4vcpu-ubuntu-2404'
      s3_bucket:
        description: 'S3 bucket name (without s3:// prefix)'
        type: string
        required: true
      file_pattern:
        description: 'Glob pattern for files to upload (e.g., "init/casdoor/init_data*.json", "migrations/*.sql")'
        type: string
        required: true
      s3_prefix:
        description: 'Optional prefix inside the environment folder'
        type: string
        default: ''
      aws_region:
        description: 'AWS region'
        type: string
        default: 'us-east-2'
      environment_detection:
        description: 'Environment detection strategy: tag_suffix (auto from tag) or manual'
        type: string
        default: 'tag_suffix'
      manual_environment:
        description: 'Manually specify environment (development/staging/production)'
        type: string
        required: false
      strip_prefix:
        description: 'Remove this prefix from file paths before uploading'
        type: string
        default: ''
      flatten:
        description: 'Upload files without preserving directory structure (only filenames)'
        type: boolean
        default: true
      dry_run:
        description: 'Dry run — preview uploads without applying them'
        type: boolean
        default: false

# OIDC token needed for aws-actions/configure-aws-credentials role assumption.
permissions:
  id-token: write
  contents: read

jobs:
  upload:
    runs-on: ${{ inputs.runner_type }}
    steps:
      - uses: actions/checkout@v4

      # Maps the triggering ref to a target folder. Empty FOLDER means
      # "no environment matched" and downstream steps are skipped.
      - name: Determine environment
        id: env
        env:
          INPUT_ENV_DETECTION: ${{ inputs.environment_detection }}
          INPUT_MANUAL_ENV: ${{ inputs.manual_environment }}
        run: |
          set -euo pipefail

          if [[ "$INPUT_ENV_DETECTION" == "manual" ]]; then
            case "$INPUT_MANUAL_ENV" in
              development|staging|production)
                FOLDER="$INPUT_MANUAL_ENV"
                ;;
              *)
                echo "::error::manual_environment must be one of: development, staging, production (got '${INPUT_MANUAL_ENV}')"
                exit 1
                ;;
            esac
          elif [[ "$INPUT_ENV_DETECTION" == "tag_suffix" ]]; then
            REF="${GITHUB_REF#refs/}"
            TAG="${GITHUB_REF#refs/tags/}"

            if [[ "$REF" == heads/develop ]] || [[ "$TAG" == *-beta* ]]; then
              FOLDER="development"
            elif [[ "$REF" == heads/release-candidate ]] || [[ "$TAG" == *-rc* ]]; then
              FOLDER="staging"
            elif [[ "$REF" == heads/main ]] || [[ "$TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
              FOLDER="production"
            else
              echo "⚠️ Ref '$REF' does not match any environment. Skipping upload."
              FOLDER=""
            fi
          else
            echo "::error::environment_detection must be one of: tag_suffix, manual (got '${INPUT_ENV_DETECTION}')"
            exit 1
          fi

          echo "folder=${FOLDER}" >> "$GITHUB_OUTPUT"
          [[ -n "$FOLDER" ]] && echo "📁 Environment: ${FOLDER}" || echo "⚠️ No environment matched"

      - name: Configure AWS credentials
        if: steps.env.outputs.folder != ''
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: ${{ inputs.aws_region }}

      # Mirrors the upload step's path logic without calling aws s3 cp.
      # Note: flatten takes precedence over strip_prefix when both are set.
      - name: Dry run summary
        if: steps.env.outputs.folder != '' && inputs.dry_run
        env:
          BUCKET: ${{ inputs.s3_bucket }}
          FOLDER: ${{ steps.env.outputs.folder }}
          PREFIX: ${{ inputs.s3_prefix }}
          PATTERN: ${{ inputs.file_pattern }}
          FLATTEN: ${{ inputs.flatten }}
          STRIP_PREFIX: ${{ inputs.strip_prefix }}
        run: |
          set -euo pipefail

          echo "::notice::DRY RUN — no files will be uploaded"
          echo "  bucket       : ${BUCKET}"
          echo "  folder       : ${FOLDER}"
          echo "  prefix       : ${PREFIX:-<none>}"
          echo "  strip_prefix : ${STRIP_PREFIX:-<none>}"
          echo "  pattern      : ${PATTERN}"
          echo "  flatten      : ${FLATTEN}"

          if [[ -n "$PREFIX" ]]; then
            S3_PATH="s3://${BUCKET}/${FOLDER}/${PREFIX}/"
          else
            S3_PATH="s3://${BUCKET}/${FOLDER}/"
          fi

          # nullglob: unmatched pattern expands to nothing instead of itself.
          shopt -s nullglob
          FILE_COUNT=0

          # PATTERN deliberately unquoted so the shell expands the glob.
          for file in $PATTERN; do
            if [[ "$FLATTEN" == "true" ]]; then
              echo "  [dry-run] aws s3 cp $file ${S3_PATH}$(basename "$file")"
            elif [[ -n "$STRIP_PREFIX" ]]; then
              DEST_PATH="${file#"$STRIP_PREFIX"/}"
              echo "  [dry-run] aws s3 cp $file ${S3_PATH}${DEST_PATH}"
            else
              echo "  [dry-run] aws s3 cp $file ${S3_PATH}${file}"
            fi
            FILE_COUNT=$((FILE_COUNT + 1))
          done

          if [[ $FILE_COUNT -eq 0 ]]; then
            echo "::warning::No files matched pattern: ${PATTERN}"
            exit 1
          fi

          echo "::notice::Would upload ${FILE_COUNT} file(s) to ${S3_PATH}"

      - name: Upload files to S3
        if: steps.env.outputs.folder != '' && !inputs.dry_run
        env:
          BUCKET: ${{ inputs.s3_bucket }}
          FOLDER: ${{ steps.env.outputs.folder }}
          PREFIX: ${{ inputs.s3_prefix }}
          PATTERN: ${{ inputs.file_pattern }}
          FLATTEN: ${{ inputs.flatten }}
          STRIP_PREFIX: ${{ inputs.strip_prefix }}
        run: |
          set -euo pipefail

          # Build S3 destination path
          if [[ -n "$PREFIX" ]]; then
            S3_PATH="s3://${BUCKET}/${FOLDER}/${PREFIX}/"
          else
            S3_PATH="s3://${BUCKET}/${FOLDER}/"
          fi

          # Find and upload files
          shopt -s nullglob
          FILE_COUNT=0

          # PATTERN deliberately unquoted so the shell expands the glob.
          for file in $PATTERN; do
            if [[ "$FLATTEN" == "true" ]]; then
              # Copying to a trailing-slash destination keeps only the basename.
              aws s3 cp "$file" "${S3_PATH}"
            elif [[ -n "$STRIP_PREFIX" ]]; then
              DEST_PATH="${file#"$STRIP_PREFIX"/}"
              aws s3 cp "$file" "${S3_PATH}${DEST_PATH}"
            else
              aws s3 cp "$file" "${S3_PATH}${file}"
            fi

            FILE_COUNT=$((FILE_COUNT + 1))
          done

          if [[ $FILE_COUNT -eq 0 ]]; then
            echo "⚠️ No files matched pattern: ${PATTERN}"
            exit 1
          fi

          echo "::notice::Uploaded ${FILE_COUNT} file(s) to ${S3_PATH}"

0 commit comments

Comments
 (0)