Skip to content

Commit 8722683

Browse files
authored
Merge branch 'qualcomm-linux:qcom-next-staging' into qcom-next-staging
2 parents e615cd2 + 8e30800 commit 8722683

9 files changed

Lines changed: 547 additions & 0 deletions

File tree

Lines changed: 90 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,90 @@
# Composite action: upload one file or a manifest of files to S3 and emit
# pre-signed URLs, or sync files down from S3.
name: AWS S3 Helper
description: Upload and download files from AWS S3

inputs:
  s3_bucket:
    description: S3 Bucket Name
    required: true
  local_file:
    # multi-upload: a manifest file listing one local path per line.
    # single-upload: the file to upload itself.
    description: Local file paths
    required: false
    default: ../artifacts/file_list.txt
  download_file:
    description: Download file paths
    required: false
    default: ''
  mode:
    # Fix: document the modes the case statement below actually accepts.
    description: Mode of operation (multi-upload/single-upload/download)
    required: true
    default: single-upload

outputs:
  presigned_url:
    # Only set by single-upload; multi-upload writes presigned_urls.json
    # and publishes it as a workflow artifact instead.
    description: Pre-signed URL for the uploaded file
    value: ${{ steps.sync-data.outputs.presigned_url }}

runs:
  using: "composite"
  steps:
    - name: Sync Data
      id: sync-data
      shell: bash
      env:
        # Bucket prefix: <owner>/<repo>/<workflow>/<branch or run id>/
        UPLOAD_LOCATION: ${{ github.repository_owner }}/${{ github.event.repository.name }}/${{ github.workflow }}/${{ github.head_ref != '' && github.head_ref || github.run_id }}/
      run: |
        echo "::group::$(printf '__________ %-100s' 'Process' | tr ' ' _)"
        case "${{ inputs.mode }}" in
          multi-upload)
            echo "Uploading files to S3 bucket..."
            first_line=true
            # Start the JSON object mapping local path -> pre-signed URL
            echo "{" > ${{ github.workspace }}/presigned_urls.json
            while IFS= read -r file; do
              if [ -f "$file" ]; then
                echo "Uploading $file..."
                aws s3 cp "$file" s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}
                echo "Uploaded $file to s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}"
                echo "Creating Pre-signed URL for $file..."
                # 'aws s3 cp' to a prefix stores the object under its
                # basename, so that is the key to presign.
                filename=$(basename "$file")
                presigned_url=$(aws s3 presign s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}$filename --expires-in 3600)
                if [ "$first_line" = true ]; then
                  first_line=false
                else
                  echo "," >> ${{ github.workspace }}/presigned_urls.json
                fi
                # Append the pre-signed URL to the file
                echo " \"${file}\": \"${presigned_url}\"" >> ${{ github.workspace }}/presigned_urls.json
                echo "Pre-signed URL for $file: $presigned_url"
              else
                echo "Warning: $file does not exist or is not a regular file."
              fi
            done < "${{ inputs.local_file }}"
            # Close the JSON object
            echo "}" >> ${{ github.workspace }}/presigned_urls.json
            ;;
          single-upload)
            echo "Uploading single file to S3 bucket..."
            aws s3 cp "${{ inputs.local_file }}" s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}
            echo "Uploaded ${{ inputs.local_file }} to s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}"
            echo "Creating Pre-signed URL for ${{ inputs.local_file }}..."
            # Fix: the uploaded key is <prefix><basename>, not
            # <prefix><relative path> — presigning the raw input path
            # (e.g. ../job_render/data/metadata.json) produced a URL for a
            # key that does not exist. Presign the basename instead, as the
            # multi-upload branch already does.
            filename=$(basename "${{ inputs.local_file }}")
            presigned_url=$(aws s3 presign s3://${{ inputs.s3_bucket }}/${{ env.UPLOAD_LOCATION }}$filename --expires-in 3600)
            echo "presigned_url=${presigned_url}" >> "$GITHUB_OUTPUT"
            ;;
          download)
            # Download the required files from S3
            echo "Downloading files from S3 bucket..."
            aws s3 sync s3://${{ inputs.s3_bucket }}/${{ inputs.download_file }} .
            ;;
          *)
            # Fix: the old message suggested 'upload'/'download', which are
            # not the modes this action accepts.
            echo "Invalid mode. Use 'multi-upload', 'single-upload' or 'download'."
            exit 1
            ;;
        esac
        # Fix: close the log group opened above.
        echo "::endgroup::"

    - name: Upload artifacts
      if: ${{ inputs.mode == 'multi-upload' }}
      uses: actions/upload-artifact@v4
      with:
        name: presigned_urls.json
        path: ${{ github.workspace }}/presigned_urls.json
        retention-days: 1

.github/actions/build/action.yml

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
# Composite action: fetch boot artifacts, build the kernel inside the
# toolchain container, and append the built modules to the test ramdisk.
name: Build workspace
description: Build workspace

inputs:
  docker_image:
    description: Docker image
    required: true
    default: kmake-image:latest

runs:
  using: "composite"
  steps:
    # Fetch a prebuilt test initramfs and the systemd-boot EFI stub package,
    # then unpack the .deb so the EFI stub files are available.
    - name: Download artifacts
      shell: bash
      run: |
        mkdir -p ../artifacts && \
        wget -O ../artifacts/ramdisk.gz https://snapshots.linaro.org/member-builds/qcomlt/testimages/arm64/1379/initramfs-test-image-qemuarm64-20230321073831-1379.rootfs.cpio.gz && \
        wget -O ../artifacts/systemd-boot-efi.deb http://ports.ubuntu.com/pool/universe/s/systemd/systemd-boot-efi_255.4-1ubuntu8_arm64.deb && \
        dpkg-deb -xv ../artifacts/systemd-boot-efi.deb ../artifacts/systemd

    # Out-of-tree kernel build (O=../kobj) inside the container, run as the
    # host user so the output tree is not root-owned.
    # Fix: quote the $(id ...) and $(dirname ...) expansions, consistent
    # with the sibling actions in this change.
    - name: Make
      shell: bash
      run: |
        docker run -i --rm \
          --user "$(id -u):$(id -g)" \
          --workdir="$PWD" \
          -v "$(dirname "$PWD")":"$(dirname "$PWD")" \
          ${{ inputs.docker_image }} bash -c "
          make O=../kobj defconfig
          make O=../kobj -j$(nproc)
          make O=../kobj -j$(nproc) dir-pkg INSTALL_MOD_STRIP=1
          "

    # Append the freshly built modules to the downloaded ramdisk as an extra
    # gzipped cpio segment. NOTE(review): this relies on the initramfs
    # loader accepting concatenated archives — confirm for the target
    # kernel configuration.
    - name: Package DLKM into ramdisk
      shell: bash
      run: |
        (cd ../kobj/tar-install ; find lib/modules | cpio -o -H newc -R +0:+0 | gzip -9 >> ../../artifacts/ramdisk.gz)
Lines changed: 154 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,154 @@
# Composite action: turn uploaded build artifacts into a rendered LAVA job
# definition (metadata.json -> cloudData.json -> job template).
name: Test Action
# Fix: 'description' is a required field in action metadata.
description: Render a LAVA job definition from uploaded build artifacts
inputs:
  docker_image:
    description: Docker image
    required: true
    default: kmake-image:latest

runs:
  using: "composite"
  steps:
    # Parse presigned_urls.json (written by aws_s3_helper multi-upload) and
    # expose each artifact's pre-signed URL as a step output.
    - name: Process presigned_urls.json
      id: process_urls
      uses: actions/github-script@v7
      with:
        script: |
          const fs = require('fs');
          const p = require('path');
          // Helper function to find a URL whose local path ends with filename
          function findUrlByFilename(filename) {
            for (const [path, url] of Object.entries(data)) {
              if (path.endsWith(filename)) {
                return url;
              }
            }
            return null;
          }
          const filePath = p.join(process.env.GITHUB_WORKSPACE, 'presigned_urls.json');
          if (fs.existsSync(filePath)) {
            console.log("File exists");
          } else {
            console.log("File does not exist");
            core.setFailed(`File not found: ${filePath}`);
            // Fix: setFailed does not stop the script; without this return
            // the readFileSync below throws a raw ENOENT on top of the
            // intended failure message.
            return;
          }
          // Read the JSON file: { "<local path>": "<pre-signed URL>", ... }
          const data = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
          // Extract URLs into variables
          const modulesTarUrl = findUrlByFilename('modules.tar.xz');
          const imageUrl = findUrlByFilename('Image');
          const vmlinuxUrl = findUrlByFilename('vmlinux');
          const dtbUrl = findUrlByFilename('qcs6490-rb3gen2.dtb');
          // Set outputs
          core.setOutput('modules_url', modulesTarUrl);
          core.setOutput('image_url', imageUrl);
          core.setOutput('vmlinux_url', vmlinuxUrl);
          core.setOutput('dtb_url', dtbUrl);
          console.log(`Modules URL: ${modulesTarUrl}`);
          console.log(`Image URL: ${imageUrl}`);
          console.log(`Vmlinux URL: ${vmlinuxUrl}`);
          console.log(`Dtb URL: ${dtbUrl}`);

    # Patch the DTB URL into metadata.json. The jq output redirection and
    # 'mv' run on the host; only jq itself runs in the container.
    - name: Create metadata.json
      id: create_metadata
      shell: bash
      run: |
        echo "Creating job definition"
        # Create the job definition using the processed URLs
        cd ../job_render
        docker run -i --rm \
          --user "$(id -u):$(id -g)" \
          --workdir="$PWD" \
          -v "$(dirname "$PWD")":"$(dirname "$PWD")" \
          -e dtb_url="${{ steps.process_urls.outputs.dtb_url }}" \
          ${{ inputs.docker_image }} \
          jq '.artifacts["dtbs/qcom/qcs6490-rb3gen2.dtb"] = env.dtb_url' data/metadata.json > temp.json && mv temp.json data/metadata.json

    - name: Upload metadata.json
      id: upload_metadata
      uses: ./.github/actions/aws_s3_helper
      with:
        local_file: ../job_render/data/metadata.json
        s3_bucket: qli-prd-kernel-gh-artifacts
        mode: single-upload

    # Patch each artifact URL into cloudData.json, one jq pass per field.
    - name: Create template json
      shell: bash
      run: |
        echo "Creating job definition"
        metadata_url="${{ steps.upload_metadata.outputs.presigned_url }}"
        vmlinux_url="${{ steps.process_urls.outputs.vmlinux_url }}"
        image_url="${{ steps.process_urls.outputs.image_url }}"
        modules_url="${{ steps.process_urls.outputs.modules_url }}"
        # Create the job definition using the processed URLs
        cd ../job_render
        # using metadata_url
        docker run -i --rm \
          --user "$(id -u):$(id -g)" \
          --workdir="$PWD" \
          -v "$(dirname "$PWD")":"$(dirname "$PWD")" \
          -e metadata_url="$metadata_url" \
          ${{ inputs.docker_image }} \
          jq '.artifacts.metadata = env.metadata_url' data/cloudData.json > temp.json && mv temp.json data/cloudData.json
        # using image_url
        docker run -i --rm \
          --user "$(id -u):$(id -g)" \
          --workdir="$PWD" \
          -v "$(dirname "$PWD")":"$(dirname "$PWD")" \
          -e image_url="$image_url" \
          ${{ inputs.docker_image }} \
          jq '.artifacts.kernel = env.image_url' data/cloudData.json > temp.json && mv temp.json data/cloudData.json
        # using vmlinux_url
        docker run -i --rm \
          --user "$(id -u):$(id -g)" \
          --workdir="$PWD" \
          -v "$(dirname "$PWD")":"$(dirname "$PWD")" \
          -e vmlinux_url="$vmlinux_url" \
          ${{ inputs.docker_image }} \
          jq '.artifacts.vmlinux = env.vmlinux_url' data/cloudData.json > temp.json && mv temp.json data/cloudData.json
        # using modules_url
        docker run -i --rm \
          --user "$(id -u):$(id -g)" \
          --workdir="$PWD" \
          -v "$(dirname "$PWD")":"$(dirname "$PWD")" \
          -e modules_url="$modules_url" \
          ${{ inputs.docker_image }} \
          jq '.artifacts.modules = env.modules_url' data/cloudData.json > temp.json && mv temp.json data/cloudData.json

    - name: Update firmware and ramdisk
      shell: bash
      run: |
        cd ../job_render
        # Fix: 'aws s3 presign' takes --expires-in, not --expires (see the
        # aws_s3_helper action, which already uses --expires-in).
        ramdisk_url="$(aws s3 presign s3://qli-prd-kernel-gh-artifacts/meta-qcom/initramfs-kerneltest-full-image-qcom-armv8a.cpio.gz --expires-in 7600)"
        firmware_url="$(aws s3 presign s3://qli-prd-kernel-gh-artifacts/meta-qcom/initramfs-firmware-rb3gen2-image-qcom-armv8a.cpio.gz --expires-in 7600)"
        # using ramdisk_url
        docker run -i --rm \
          --user "$(id -u):$(id -g)" \
          --workdir="$PWD" \
          -v "$(dirname "$PWD")":"$(dirname "$PWD")" \
          -e ramdisk_url="$ramdisk_url" \
          ${{ inputs.docker_image }} \
          jq '.artifacts.ramdisk = env.ramdisk_url' data/cloudData.json > temp.json && mv temp.json data/cloudData.json

        # using firmware_url
        docker run -i --rm \
          --user "$(id -u):$(id -g)" \
          --workdir="$PWD" \
          -v "$(dirname "$PWD")":"$(dirname "$PWD")" \
          -e firmware_url="$firmware_url" \
          ${{ inputs.docker_image }} \
          jq '.artifacts.firmware = env.firmware_url' data/cloudData.json > temp.json && mv temp.json data/cloudData.json

    - name: Create lava_job_definition
      shell: bash
      run: |
        cd ../job_render
        # Fix: tolerate an existing directory on re-run.
        mkdir -p renders
        docker run -i --rm \
          --user "$(id -u):$(id -g)" \
          --workdir="$PWD" \
          -v "$(dirname "$PWD")":"$(dirname "$PWD")" \
          ${{ inputs.docker_image }} \
          sh -c 'export BOOT_METHOD=fastboot && \
          export TARGET=qcs6490-rb3gen2 && \
          export TARGET_DTB=qcs6490-rb3gen2 && \
          python3 lava_Job_definition_generator.py --localjson ./data/cloudData.json'
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
# Composite action: obtain the kernel build container image. Despite the
# name, the image is currently built from source rather than pulled from a
# registry.
name: Pull docker image
description: Pull docker image

inputs:
  image:
    description: The docker image to pull
    required: true
    default: kmake-image:latest

  github_token:
    description: The GitHub token to use for authentication
    required: true
    # NOTE(review): this input is currently unused — nothing below
    # authenticates with it. Confirm whether a registry pull was intended
    # before removing it.

runs:
  using: "composite"
  steps:
    - name: Clone kmake-image
      shell: bash
      run: |
        git clone https://github.com/qualcomm-linux/kmake-image.git

    - name: Build docker image
      shell: bash
      run: |
        cd kmake-image
        # Fix: tag the image with the caller-supplied name so a non-default
        # 'image' input is honored. The default 'kmake-image:latest' is
        # identical to the implicit tag of the previous 'docker build . -t
        # kmake-image', so existing callers are unaffected.
        docker build . -t "${{ inputs.image }}"

.github/workflows/build.yml

Lines changed: 75 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,75 @@
# Reusable build workflow: build the kernel in a container and upload the
# resulting artifacts to S3.
name: _build
on:
  workflow_call:
    inputs:
      docker_image:
        description: Docker image
        type: string
        required: true

jobs:
  build:
    runs-on:
      group: GHA-Kernel-SelfHosted-RG
      labels: [ self-hosted, kernel-prd-u2404-x64-large-od-ephem ]
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref }}
          fetch-depth: 0

      - name: Pull docker image
        uses: ./.github/actions/pull_docker_image
        with:
          image: ${{ inputs.docker_image }}
          github_token: ${{ secrets.GITHUB_TOKEN }}

      - name: Build workspace
        id: build_workspace
        uses: ./.github/actions/build
        with:
          docker_image: ${{ inputs.docker_image }}

      # Write the manifest that aws_s3_helper's multi-upload mode reads
      # (one artifact path per line).
      - name: Create file list for artifacts upload
        run: |
          touch ../artifacts/file_list.txt
          tar -cJf modules.tar.xz ../kobj/tar-install/lib/modules/
          echo "modules.tar.xz" >> ../artifacts/file_list.txt
          echo "../kobj/arch/arm64/boot/Image" >> ../artifacts/file_list.txt
          echo "../kobj/vmlinux" >> ../artifacts/file_list.txt
          echo "../kobj/arch/arm64/boot/dts/qcom/qcs6490-rb3gen2.dtb" >> ../artifacts/file_list.txt

      # Fix: dropped aws_access_key_id/aws_secret_access_key — the
      # aws_s3_helper action declares no such inputs, so those values were
      # silently ignored (Actions logs "Unexpected input(s)"). AWS
      # credentials are expected to come from the self-hosted runner
      # environment.
      - name: Upload artifacts
        uses: ./.github/actions/aws_s3_helper
        with:
          s3_bucket: qli-prd-kernel-gh-artifacts
          local_file: ../artifacts/file_list.txt
          mode: multi-upload

      - name: Clean up
        run: |
          rm -rf ../artifacts
          rm -rf ../kobj
          rm -rf modules.tar.xz

      - name: Update summary
        if: success() || failure()
        shell: bash
        run: |
          # Fix: quote the expansion — with the old unquoted form the test
          # is malformed if the outcome expands empty.
          if [ "${{ steps.build_workspace.outcome }}" == 'success' ]; then
            echo "Build was successful"
            summary=":heavy_check_mark: Build Success"
          else
            echo "Build failed"
            summary=":x: Build Failed"
          fi
          SUMMARY='
          <details><summary><i>Build Summary</i></summary>

          '${summary}'
          </details>
          '
          echo -e "$SUMMARY" >> $GITHUB_STEP_SUMMARY

0 commit comments

Comments
 (0)