adding json schema for dbt yaml #25

name: Prod Deploy dbt metadata
on:
  workflow_dispatch:
  push:
    branches:
      - main
env:
  NSW_DOE_DATA_STACK_IN_A_BOX_DAGSTER_PROJECT_DIR: ${{ vars.NSW_DOE_DATA_STACK_IN_A_BOX_DAGSTER_PROJECT_DIR }}
  NSW_DOE_DATA_STACK_IN_A_BOX_DBT_PROJECT_DIR: ${{ vars.NSW_DOE_DATA_STACK_IN_A_BOX_DBT_PROJECT_DIR }}
  NSW_DOE_DATA_STACK_IN_A_BOX__ENV: ${{ vars.NSW_DOE_DATA_STACK_IN_A_BOX__ENV }}
  NSW_DOE_DATA_STACK_IN_A_BOX_DB_PATH_AND_DB: ${{ secrets.NSW_DOE_DATA_STACK_IN_A_BOX_DB_PATH_AND_DB }}
  NSW_DOE_DATA_STACK_IN_A_BOX_DB_NAME: ${{ vars.NSW_DOE_DATA_STACK_IN_A_BOX_DB_NAME }}
  NSW_DOE_DATA_STACK_IN_A_BOX_TARGET_SCHEMA: ${{ vars.NSW_DOE_DATA_STACK_IN_A_BOX_TARGET_SCHEMA }}
  MOTHERDUCK_TOKEN: ${{ secrets.MOTHERDUCK_TOKEN }}
  DATAFOLD_APIKEY: ${{ secrets.DATAFOLD_APIKEY }}
  DAGSTER_HOME: ${{ vars.DAGSTER_HOME }}
  AWS_ROLE_TO_ASSUME: ${{ secrets.AWS_ROLE_TO_ASSUME }}
  S3_BUCKET_METADATA: ${{ secrets.S3_BUCKET_METADATA }}
  # EVIDENCE_SOURCE__nsw_doe_data_stack_in_a_box__filename: ${{ vars.EVIDENCE_SOURCE__nsw_doe_data_stack_in_a_box__filename }}
  # EVIDENCE_SOURCE__nsw_doe_data_stack_in_a_box__database:
# required for AWS/GitHub OIDC security
permissions:
  id-token: write
  contents: read
jobs:
  ci__prod__no_source_change:
    name: Prod deployment with no source changes
    runs-on: ubuntu-latest
    environment: prod
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11.x"
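      # uv is installed with pip, then used with --system so the requirements
      # land in the runner's Python rather than in a virtualenv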
      - name: Install uv
        run: python3 -m pip install uv
      - name: Install Python requirements
        run: uv pip install --system -r requirements.txt
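      # dbt deps installs the packages declared in packages.yml into dbt_packages/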
      - name: Dbt checks
        run: |
          cd ${{github.workspace}}/transformation/transformation_nsw_doe
          dbt deps
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          # bucket is public so no need for creds
          # aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          # aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ap-southeast-2
          role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }}
          # role-external-id: ${{ secrets.AWS_ROLE_EXTERNAL_ID }}
          # role-duration-seconds: 1200
          role-session-name: MySessionName
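      # the two commented steps below sketch a state-based (slim) deploy:
      # pull the last prod manifest from S3 and build only state:modified+ models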
      # - name: Get the last manifest.json from s3
      #   run: |
      #     echo s3://$S3_BUCKET_METADATA/prod/manifest/manifest.json
      #     aws s3 cp s3://$S3_BUCKET_METADATA/prod/manifest/manifest.json ${{github.workspace}}/transformation/transformation_nsw_doe/target/last_manifest/manifest.json
      # - name: Deploy
      #   run: |
      #     cd ${{github.workspace}}/transformation/transformation_nsw_doe
      #     dbt build --exclude "+*github*+ +*google*+" --models state:modified+ --state ${{github.workspace}}/transformation/transformation_nsw_doe/target/last_manifest --profiles-dir ${{github.workspace}}/transformation/transformation_nsw_doe
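      # dbt parse writes target/manifest.json without running any models;
      # the upload step below makes it the state baseline for future slim runs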
      - name: Generate Manifest
        run: |
          cd ${{github.workspace}}/transformation/transformation_nsw_doe
          dbt parse --profiles-dir ${{github.workspace}}/transformation/transformation_nsw_doe
      - name: Upload manifest.json to s3
        run: |
          aws s3 cp ${{github.workspace}}/transformation/transformation_nsw_doe/target/manifest.json s3://$S3_BUCKET_METADATA/prod/manifest/manifest.json
      - name: Generate Docs
        run: |
          cd ${{github.workspace}}/transformation/transformation_nsw_doe
          dbt docs generate --no-version-check --profiles-dir ${{github.workspace}}/transformation/transformation_nsw_doe
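      # index.html, catalog.json and manifest.json together make up the static
      # dbt docs site, so the public bucket can be served directly as a website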
      - name: Publish Docs
        run: |
          aws s3 cp ${{github.workspace}}/transformation/transformation_nsw_doe/target/index.html s3://$S3_BUCKET_METADATA/
          aws s3 cp ${{github.workspace}}/transformation/transformation_nsw_doe/target/catalog.json s3://$S3_BUCKET_METADATA/
          aws s3 cp ${{github.workspace}}/transformation/transformation_nsw_doe/target/manifest.json s3://$S3_BUCKET_METADATA/
      # removing Datafold until we get a MotherDuck connection
      # - name: Datafold dbt upload
      #   run: |
      #     datafold dbt upload --ci-config-id 40 --run-type production --target-folder ${{github.workspace}}/transformation/transformation_nsw_doe/target
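
For context on the PR title, wiring a JSON schema to dbt YAML files is usually done with a yaml-language-server modeline at the top of each properties file. A minimal sketch, assuming the dbt_yml_files schema from the dbt-labs/dbt-jsonschema repo (the exact file path under schemas/ should be verified against that repo) and a hypothetical model name:

# yaml-language-server: $schema=https://raw.githubusercontent.com/dbt-labs/dbt-jsonschema/main/schemas/dbt_yml_files.json
version: 2
models:
  - name: my_model  # hypothetical; any model in transformation_nsw_doe works
    description: editors running yaml-language-server now validate and autocomplete these keys

With the modeline in place, an editor running yaml-language-server (for example VS Code with the YAML extension) flags typos in keys such as description or tests as you edit.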