 # specific language governing permissions and limitations
 # under the License.
 #
-name: Release Apache Spark
+
+# This workflow is intended for use in forked repositories
+# when manually dispatching it to create an RC.
+# To enable full release functionality, developers should manually configure
+# the following GitHub Secrets in their repository settings:
+#
+# - ASF_USERNAME:
+#     Your Apache Software Foundation (ASF) account ID.
+#
+# - ASF_PASSWORD:
+#     The password associated with your ASF account.
+#
+# - GPG_PRIVATE_KEY:
+#     Your GPG private key, exported using:
+#       gpg --armor --export-secret-keys ABCD1234 > private.key
+#     Ensure this key is registered with a public key server.
+#     For more details, refer to:
+#       https://spark.apache.org/release-process.html#preparing-gpg-key
+#
+# - GPG_PASSPHRASE:
+#     The passphrase for your GPG private key.
+#
+# This workflow supports dry runs by default. If the required GitHub Secrets are not provided,
+# only dry runs will be executed.
+#
+# In case something goes wrong during the process and a release candidate (RC) needs to be
+# cleaned up, follow these steps:
+#
+# 1. Revert the RC-related commits, such as:
+#    - "Preparing development version 3.5.7-SNAPSHOT"
+#    - "Preparing Spark release v3.5.6-rc1"
+#
+# 2. Delete the RC tag from the remote repository, for example:
+#    - git push --delete apache v3.5.6-rc1
+#
+# 3. Remove the RC artifacts from SVN:
+#    - RC=v3.5.6-rc1 && svn rm https://dist.apache.org/repos/dist/dev/spark/"${RC}"-bin/ -m "Removing RC artifacts."
+#    - RC=v3.5.6-rc1 && svn rm https://dist.apache.org/repos/dist/dev/spark/"${RC}"-docs/ -m "Removing RC artifacts."
+#
+# 4. Drop the staging repository if it exists (https://repository.apache.org/#stagingRepositories)
+
+name: Release Apache Spark (dryrun and RC)
 
 on:
   schedule:
     - cron: '0 7 * * *'
   workflow_dispatch:
+    inputs:
+      branch:
+        description: 'Branch to release. Leave it empty to launch a dryrun. Dispatch this workflow only in the forked repository.'
+        required: false
+      release-version:
+        description: 'Release version. Leave it empty to launch a dryrun.'
+        required: false
 
 jobs:
   release:
-    name: Release Apache Spark
+    name: Release Apache Spark (dryrun and RC)
     runs-on: ubuntu-latest
-    if: github.repository == 'apache/spark'
+    # Do not allow dispatching this workflow manually in the main repo.
+    if: ${{ !(github.repository == 'apache/spark' && inputs.branch != '' && inputs.release-version != '') }}
     steps:
       - name: Checkout Spark repository
         uses: actions/checkout@v4
         with:
-          fetch-depth: 0
-      - name: Release Apache Spark
+          repository: apache/spark
+          ref: "${{ inputs.branch }}"
+      - name: Release Apache Spark (dryrun and RC)
         env:
-          ASF_USERNAME: gurwls223
-          GIT_NAME: HyukjinKwon
-          GPG_PASSPHRASE: not_used
-          SKIP_TAG: 1
-          ANSWER: y
+          GIT_BRANCH: "${{ inputs.branch }}"
+          RELEASE_VERSION: "${{ inputs.release-version }}"
+          GIT_NAME: "${{ github.actor }}"
+          ASF_USERNAME: "${{ secrets.ASF_USERNAME }}"
+          ASF_PASSWORD: "${{ secrets.ASF_PASSWORD }}"
+          GPG_PRIVATE_KEY: "${{ secrets.GPG_PRIVATE_KEY }}"
+          GPG_PASSPHRASE: "${{ secrets.GPG_PASSPHRASE }}"
           DEBUG_MODE: 1
+          ANSWER: y
         run: |
-          gpg --batch --gen-key <<EOF
+          empty_count=0
+          non_empty_count=0
+          for val in "$GIT_BRANCH" "$RELEASE_VERSION"; do
+            if [ -z "$val" ]; then
+              empty_count=$((empty_count+1))
+            else
+              non_empty_count=$((non_empty_count+1))
+            fi
+          done
+
+          if [ "$empty_count" -gt 0 ] && [ "$non_empty_count" -gt 0 ]; then
+            echo "Error: Either provide all inputs or leave them all empty for a dryrun."
+            exit 1
+          fi
+
+          if [ "$empty_count" -eq 2 ]; then
+            echo "Dry run mode enabled"
+            export DRYRUN_MODE=1
+            ASF_PASSWORD="not_used"
+            GPG_PRIVATE_KEY="not_used"
+            GPG_PASSPHRASE="not_used"
+            ASF_USERNAME="gurwls223"
+            export SKIP_TAG=1
+            unset GIT_BRANCH
+            unset RELEASE_VERSION
+          else
+            echo "Full release mode enabled"
+            export DRYRUN_MODE=0
+          fi
+
+          export ASF_PASSWORD GPG_PRIVATE_KEY GPG_PASSPHRASE ASF_USERNAME
+          [ -n "$GIT_BRANCH" ] && export GIT_BRANCH
+          [ -n "$RELEASE_VERSION" ] && export RELEASE_VERSION
+
+          if [ "$DRYRUN_MODE" = "1" ]; then
+            gpg --batch --gen-key <<EOF
           Key-Type: RSA
           Key-Length: 4096
           Name-Real: Test CI User
@@ -51,32 +139,41 @@ jobs:
           %no-protection
           %commit
           EOF
+          else
+            gpg --batch --import <<< "$GPG_PRIVATE_KEY"
+          fi
 
-          # Paths
           RELEASE_DIR="$PWD"/spark-release
           OUTPUT_DIR="$RELEASE_DIR/output"
-
-          # Ensure release dir exists
           mkdir -p "$RELEASE_DIR"
-
-          # Start the release script in the background
-          dev/create-release/do-release-docker.sh -d "$RELEASE_DIR" -n &
+
+          # Start the release process
+          CMD="dev/create-release/do-release-docker.sh -d \"$RELEASE_DIR\""
+          if [ "$DRYRUN_MODE" = "1" ]; then
+            CMD="$CMD -n"
+          fi
+
+          echo "Running release command: $CMD"
+
+          bash -c "$CMD" &
           RELEASE_PID=$!
-
-          echo "Started release script with PID $RELEASE_PID"
-
-          # Start tailing docker-build.log
-          sleep 3
-          tail -F "$RELEASE_DIR/docker-build.log" &
+
+          # Tail the docker build log once the release script creates it
+          LOG_FILE="$RELEASE_DIR/docker-build.log"
+          echo "Waiting for log file: $LOG_FILE"
+          while [ ! -f "$LOG_FILE" ]; do
+            sleep 3
+          done
+
+          echo "Log file found. Starting tail."
+          tail -F "$LOG_FILE" &
           TAIL_PID1=$!
-
-          # Start a background job to watch for new *.log files and tail them
+
           (
             LOGGED_FILES=()
             while true; do
               for file in "$OUTPUT_DIR"/*.log; do
                 [[ -f "$file" ]] || continue
-                # Check if we are already tailing this file
                 if [[ ! " ${LOGGED_FILES[@]} " =~ " ${file} " ]]; then
                   echo "Tailing new log file: $file"
                   tail -F "$file" &
@@ -87,21 +184,27 @@ jobs:
             done
           ) &
           TAIL_PID2=$!
-
-          # Wait for the release script to finish
+
           wait $RELEASE_PID
-
-          # Once release is done, kill the tail processes
-          kill $TAIL_PID1 $TAIL_PID2
+          kill $TAIL_PID1 $TAIL_PID2 || true
+
+          # Zip logs/output
+          if [ "$DRYRUN_MODE" = "1" ]; then
+            zip logs.zip "$RELEASE_DIR/docker-build.log" "$OUTPUT_DIR"/*.log
+            zip -9 output.zip -r "$OUTPUT_DIR"
+          else
+            zip -P "$ASF_PASSWORD" logs.zip "$RELEASE_DIR/docker-build.log" "$OUTPUT_DIR"/*.log
+            zip -9 -P "$ASF_PASSWORD" output.zip -r "$OUTPUT_DIR"
+          fi
       - name: Upload logs
         if: always()
         uses: actions/upload-artifact@v4
         with:
           name: build-logs
-          path: spark-release/docker-build.log
+          path: logs.zip
       - name: Upload output
         if: always()
         uses: actions/upload-artifact@v4
         with:
           name: build-output
-          path: spark-release/output
+          path: output.zip
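
Usage note: the secrets described in the workflow header can be configured, and the workflow dispatched, from the command line. The sketch below is illustrative only and assumes the GitHub CLI (gh) is installed and authenticated; the fork path <your-id>/spark, the GPG key ID ABCD1234, and the branch/version values are placeholders, not part of the commit above.

    # Configure the required secrets on a fork (all values are placeholders).
    gh secret set ASF_USERNAME --repo <your-id>/spark --body "<your-asf-id>"
    gh secret set ASF_PASSWORD --repo <your-id>/spark      # prompts for the value
    gh secret set GPG_PASSPHRASE --repo <your-id>/spark    # prompts for the value
    gpg --armor --export-secret-keys ABCD1234 | gh secret set GPG_PRIVATE_KEY --repo <your-id>/spark

    # Dispatch an RC build from the fork; leave both inputs out to get a dry run.
    gh workflow run "Release Apache Spark (dryrun and RC)" --repo <your-id>/spark \
      -f branch=branch-3.5 -f release-version=3.5.6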