Add server/pxf-jdbc-drivers #172
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Continuous-integration pipeline for PXF: builds open-gpdb and PXF Debian
# packages, then runs the automation test groups against the cached builds.
name: PXF CI

on:
  push:
    branches: [ main, MDB_STABLE ]
  pull_request:
    branches: [ main, MDB_STABLE ]

jobs:
| build_gpdb_debs: | |
| name: Build open-gpdb Debian Packages | |
| runs-on: ubuntu-22.04 | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| target: # make targets | |
| - 'deb-gpdb-bionic' | |
| - 'deb-gpdb-jammy' | |
| steps: | |
| - name: Get Date | |
| id: get-date | |
| run: echo "week=$(/bin/date -u '+%U')" >> "$GITHUB_OUTPUT" | |
| - name: database deb files caching | |
| id: cache-debs | |
| uses: actions/cache@v4 | |
| with: | |
| path: ./downloads/*.deb | |
| # save per-os with 7 days TTL | |
| key: ${{ runner.os }}-${{ matrix.target }}-${{ steps.get-date.outputs.week }} | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| submodules: true | |
| - name: Build open-gpdb | |
| if: steps.cache-debs.outputs.cache-hit != 'true' | |
| run: make -C package ${{ matrix.target }} | |
| build_pxf_debs: | |
| name: Build PXF Debian Packages | |
| runs-on: ubuntu-22.04 | |
| needs: [build_gpdb_debs] | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| target: # make targets | |
| - pxf: 'deb-pxf6-gpdb-bionic' | |
| db-cache-key-sfx: 'deb-gpdb-bionic' | |
| - pxf: 'deb-pxf6-gpdb-jammy' | |
| db-cache-key-sfx: 'deb-gpdb-jammy' | |
| steps: | |
| - name: Get Date | |
| id: get-date | |
| run: echo "week=$(/bin/date -u '+%U')" >> "$GITHUB_OUTPUT" | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| submodules: true | |
| - name: (restore) database deb files caching | |
| id: cache-debs | |
| uses: actions/cache/restore@v4 | |
| with: | |
| fail-on-cache-miss: true | |
| path: ./downloads/*.deb | |
| key: ${{ runner.os }}-${{ matrix.target.db-cache-key-sfx }}-${{ steps.get-date.outputs.week }} | |
| - name: Build PXF6 | |
| run: make -C package ${{ matrix.target.pxf }} | |
| - name: (save) PXF debs in cache | |
| uses: actions/cache/save@v4 | |
| id: cache | |
| with: | |
| path: ./downloads/*pxf*.deb | |
| key: ${{ runner.os }}-${{ matrix.target.pxf }}-${{ github.sha }} | |
| pxf-test: | |
| name: Automation tests | |
| runs-on: ubuntu-22.04 | |
| needs: [build_pxf_debs] | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| target: | |
| - pxf: 'deb-pxf6-gpdb-bionic' | |
| db-cache-key-sfx: 'deb-gpdb-bionic' | |
| test_group: | |
| - cli | |
| - external-table | |
| - sanity | |
| - smoke | |
| - hdfs | |
| - hcatalog | |
| - hcfs | |
| - hive | |
| - hbase | |
| - profile | |
| - jdbc | |
| - proxy | |
| - unused | |
| - s3 | |
| - features | |
| - gpdb | |
| steps: | |
| - name: Get Date | |
| id: get-date | |
| run: echo "week=$(/bin/date -u '+%U')" >> "$GITHUB_OUTPUT" | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| submodules: true | |
| - name: (restore) database deb files caching | |
| id: cache-debs-db | |
| uses: actions/cache/restore@v4 | |
| with: | |
| fail-on-cache-miss: true | |
| path: ./downloads/*.deb | |
| key: ${{ runner.os }}-${{ matrix.target.db-cache-key-sfx }}-${{ steps.get-date.outputs.week }} | |
| - name: (restore) PXF deb files caching | |
| id: cache-debs-pxf | |
| uses: actions/cache/restore@v4 | |
| with: | |
| fail-on-cache-miss: true | |
| path: ./downloads/*pxf*.deb | |
| key: ${{ runner.os }}-${{ matrix.target.pxf }}-${{ github.sha }} | |
| - name: Use PXF builds | |
| run: ls -lah ./downloads/ | |
| - name: Build automation images | |
| if: matrix.target.pxf == 'deb-pxf6-gpdb-bionic' # FIXME: support other PXF versions | |
| continue-on-error: true | |
| timeout-minutes: 120 | |
| run: | | |
| echo "Building automation images..." | |
| cd automation | |
| make copy-debs | |
| docker compose build singlecluster | |
| docker compose build universe | |
| docker compose up -d | |
| docker exec universe bash -lc "/entrypoint.sh" | |
| - name: Run Test - ${{ matrix.test_group }} | |
| id: run_test | |
| continue-on-error: true | |
| timeout-minutes: 120 | |
| run: | | |
| docker exec universe bash -lc "/run_tests.sh ${{ matrix.test_group }}" | |
| - name: Collect artifacts and generate stats | |
| if: always() | |
| id: collect_artifacts | |
| run: | | |
| mkdir -p artifacts/logs | |
| TEST_GROUP="${{ matrix.test_group }}" | |
| TEST_RESULT="${{ steps.run_test.outcome }}" | |
| # Initialize counters | |
| TOTAL=0 | |
| PASSED=0 | |
| FAILED=0 | |
| SKIPPED=0 | |
| # Copy test artifacts | |
| docker exec universe bash -c "cp -r /home/gpadmin/workspace/pxf/automation/automation_logs/ /tmp/pxf-logs/ 2>/dev/null || true" || true | |
| docker exec universe bash -c "cp -r /home/gpadmin/workspace/pxf/automation/log/ /tmp/pxf-logs/ 2>/dev/null || true" || true | |
| docker exec universe bash -c "cp -r /home/gpadmin/workspace/pxf/automation/sqlrepo/ /tmp/pxf-logs/ 2>/dev/null || true" || true | |
| docker exec universe bash -c "cp -r /home/gpadmin/workspace/pxf/automation/target/surefire-reports/ /tmp/pxf-logs/ 2>/dev/null || true" || true | |
| docker exec universe bash -c "cp -r /home/gpadmin/workspace/singlecluster/storage/logs/ /tmp/pxf-logs/ 2>/dev/null || true" || true | |
| docker cp universe:/tmp/pxf-logs artifacts/logs/ 2>/dev/null || true | |
| # Parse surefire reports for automation tests | |
| if [[ "$TEST_GROUP" != "cli" && "$TEST_GROUP" != "server" ]]; then | |
| for xml in artifacts/logs/pxf-logs/surefire-reports/TEST-*.xml; do | |
| if [ -f "$xml" ]; then | |
| tests=$(grep -oP 'tests="\K\d+' "$xml" 2>/dev/null | head -1 || echo "0") | |
| failures=$(grep -oP 'failures="\K\d+' "$xml" 2>/dev/null | head -1 || echo "0") | |
| errors=$(grep -oP 'errors="\K\d+' "$xml" 2>/dev/null | head -1 || echo "0") | |
| skipped=$(grep -oP 'skipped="\K\d+' "$xml" 2>/dev/null | head -1 || echo "0") | |
| TOTAL=$((TOTAL + tests)) | |
| FAILED=$((FAILED + failures + errors)) | |
| SKIPPED=$((SKIPPED + skipped)) | |
| fi | |
| done | |
| PASSED=$((TOTAL - FAILED - SKIPPED)) | |
| fi | |
| # Generate stats JSON | |
| cat > artifacts/test_stats.json <<EOF | |
| { | |
| "group": "$TEST_GROUP", | |
| "result": "$TEST_RESULT", | |
| "total": $TOTAL, | |
| "passed": $PASSED, | |
| "failed": $FAILED, | |
| "skipped": $SKIPPED | |
| } | |
| EOF | |
| echo "failed_count=$FAILED" >> $GITHUB_OUTPUT | |
| echo "skipped_count=$SKIPPED" >> $GITHUB_OUTPUT | |
| echo "Test stats for $TEST_GROUP: total=$TOTAL, passed=$PASSED, failed=$FAILED, skipped=$SKIPPED" | |
| - name: Cleanup containers | |
| if: always() | |
| run: | | |
| docker compose -f automation/docker-compose.yml down -v || true | |
| - name: Upload test artifacts | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: test-results-${{ matrix.test_group }} | |
| path: artifacts/** | |
| if-no-files-found: ignore | |
| retention-days: 7 | |
| - name: Check test result | |
| if: always() | |
| run: | | |
| FAILED_COUNT="${{ steps.collect_artifacts.outputs.failed_count || 0 }}" | |
| SKIPPED_COUNT="${{ steps.collect_artifacts.outputs.skipped_count || 0 }}" | |
| if [ "${{ steps.run_test.outcome }}" == "failure" ] || [ "$FAILED_COUNT" -gt 0 ] || [ "$SKIPPED_COUNT" -gt 0 ]; then | |
| echo "Test group ${{ matrix.test_group }} failed (Failures: $FAILED_COUNT, Skipped: $SKIPPED_COUNT)" | |
| exit 1 | |
| fi | |
| # Stage 3: Summary job | |
| test-summary: | |
| name: Test Summary | |
| needs: [pxf-test] | |
| if: always() | |
| runs-on: ubuntu-22.04 | |
| steps: | |
| - name: Download all test artifacts | |
| uses: actions/download-artifact@v4 | |
| with: | |
| path: all-artifacts | |
| pattern: test-results-* | |
| - name: Generate summary | |
| run: | | |
| echo "## PXF Test Results Summary" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| # Overall counters | |
| OVERALL_TOTAL=0 | |
| OVERALL_PASSED=0 | |
| OVERALL_FAILED=0 | |
| OVERALL_SKIPPED=0 | |
| GROUPS_PASSED=0 | |
| GROUPS_FAILED=0 | |
| FAILED_GROUP_NAMES="" | |
| # Collect all test stats | |
| declare -A GROUP_STATS | |
| for dir in all-artifacts/test-results-*; do | |
| if [ -d "$dir" ] && [ -f "$dir/test_stats.json" ]; then | |
| group=$(cat "$dir/test_stats.json" | grep -oP '"group":\s*"\K[^"]+' || basename "$dir" | sed 's/test-results-//') | |
| result=$(cat "$dir/test_stats.json" | grep -oP '"result":\s*"\K[^"]+' || echo "unknown") | |
| total=$(cat "$dir/test_stats.json" | grep -oP '"total":\s*\K\d+' || echo "0") | |
| passed=$(cat "$dir/test_stats.json" | grep -oP '"passed":\s*\K\d+' || echo "0") | |
| failed=$(cat "$dir/test_stats.json" | grep -oP '"failed":\s*\K\d+' || echo "0") | |
| skipped=$(cat "$dir/test_stats.json" | grep -oP '"skipped":\s*\K\d+' || echo "0") | |
| GROUP_STATS[$group]="$result,$total,$passed,$failed,$skipped" | |
| OVERALL_TOTAL=$((OVERALL_TOTAL + total)) | |
| OVERALL_PASSED=$((OVERALL_PASSED + passed)) | |
| OVERALL_FAILED=$((OVERALL_FAILED + failed)) | |
| OVERALL_SKIPPED=$((OVERALL_SKIPPED + skipped)) | |
| if [ "$result" == "success" ] && [ "$failed" -eq 0 ] && [ "$skipped" -eq 0 ]; then | |
| GROUPS_PASSED=$((GROUPS_PASSED + 1)) | |
| else | |
| GROUPS_FAILED=$((GROUPS_FAILED + 1)) | |
| FAILED_GROUP_NAMES="${FAILED_GROUP_NAMES}${group} " | |
| fi | |
| fi | |
| done | |
| # Overall summary | |
| echo "### Overall Summary" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| if [ $GROUPS_FAILED -eq 0 ]; then | |
| echo "✅ **All ${GROUPS_PASSED} test groups passed**" >> $GITHUB_STEP_SUMMARY | |
| else | |
| echo "❌ **${GROUPS_FAILED} of $((GROUPS_PASSED + GROUPS_FAILED)) test groups failed**" >> $GITHUB_STEP_SUMMARY | |
| fi | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| echo "- Total Tests: $OVERALL_TOTAL" >> $GITHUB_STEP_SUMMARY | |
| echo "- Passed: $OVERALL_PASSED" >> $GITHUB_STEP_SUMMARY | |
| echo "- Failed: $OVERALL_FAILED" >> $GITHUB_STEP_SUMMARY | |
| echo "- Skipped: $OVERALL_SKIPPED" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| # Detailed table | |
| echo "### Test Results by Group" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| echo "| Test Group | Status | Passed | Failed | Skipped | Total |" >> $GITHUB_STEP_SUMMARY | |
| echo "|------------|--------|-------:|-------:|--------:|------:|" >> $GITHUB_STEP_SUMMARY | |
| for group in $(echo "${!GROUP_STATS[@]}" | tr ' ' '\n' | sort); do | |
| IFS=',' read -r result total passed failed skipped <<< "${GROUP_STATS[$group]}" | |
| if [ "$result" == "success" ] && [ "$failed" -eq 0 ] && [ "$skipped" -eq 0 ]; then | |
| status="✅ PASS" | |
| else | |
| status="❌ FAIL" | |
| fi | |
| echo "| $group | $status | $passed | $failed | $skipped | $total |" >> $GITHUB_STEP_SUMMARY | |
| done | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| # Check if any group failed | |
| if [ $GROUPS_FAILED -gt 0 ]; then | |
| echo "::error::${GROUPS_FAILED} test group(s) failed: ${FAILED_GROUP_NAMES}" | |
| exit 1 | |
| fi |