upd workflows to use requirements.txt (#289)

Author: Jack Forgash, 2024-03-13 13:54:26 -06:00 (committed by GitHub)
Parent: f174dc3cd4
Commit: db39b35ceb
9 changed files with 252 additions and 73 deletions
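
The requirements.txt these workflows now install from is not included in this diff. A plausible minimal sketch, inferred only from the packages the workflows previously installed inline and from the upgrade-dbt-1.7 branch referenced below, might look like:

  # hypothetical requirements.txt (the actual file is not shown in this commit);
  # packages taken from the old inline "pip3 install ..." lines, version pin
  # assumed from the AN-4374/upgrade-dbt-1.7 branch name
  dbt-snowflake==1.7.*
  cli_passthrough
  requests
  click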

@@ -22,6 +22,54 @@ concurrency:
  group: ${{ github.workflow }}
jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_docs_updates.yml@AN-4374/upgrade-dbt-1.7
    secrets: inherit
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: refresh ddl for datashare
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main
      - name: generate dbt docs
        run: dbt docs generate -t prod
      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/
      - name: clean up target directory
        run: dbt clean
      - name: check for changes
        run: git status
      - name: stage changed files
        run: git add .
      - name: commit changed files
        run: |
          git config user.email "abc@xyz"
          git config user.name "github-actions"
          git commit -am "Auto-update docs"
      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs
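
The one-line guard in the "refresh ddl for datashare" step above only runs the model when dbt can actually resolve it. A minimal sketch of the same logic spread over several lines for readability (same model path, same behaviour):

  # run the datashare DDL model only if dbt resolves it to exactly one node,
  # i.e. the fsc_utils package really ships it in this project
  cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l)
  if [ "$cnt" -eq 1 ]; then
    dbt run -m fsc_utils.datashare._datashare___create_gold
  fi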

@@ -58,7 +58,7 @@ jobs:
      - name: install dependencies
        run: |
          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs

@@ -24,11 +24,30 @@ concurrency:
  group: ${{ github.workflow }}
jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@AN-4374/upgrade-dbt-1.7
    with:
      dbt_command: >
        dbt run-operation run_sp_create_prod_clone
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run-operation run_sp_create_prod_clone
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target

@@ -37,11 +37,36 @@ concurrency:
  group: ${{ github.workflow }}
jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_deployment_template.yml@AN-4374/upgrade-dbt-1.7
    with:
      dbt_command: |
        ${{ inputs.dbt_command }}
      warehouse: ${{ inputs.WAREHOUSE }}
      environment: workflow_prod
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}
      - name: Run datashare model
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
          dbt run-operation run_query --args "{sql: call admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}"
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target

@@ -24,21 +24,40 @@ concurrency:
  group: ${{ github.workflow }}
jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@AN-4374/upgrade-dbt-1.7
    with:
      dbt_command: >
        dbt run -s
        2+streamline__get_transactions_history_mainnet_18
        2+streamline__get_transactions_history_mainnet_19
        2+streamline__get_transaction_results_history_mainnet_14
        2+streamline__get_transaction_results_history_mainnet_15
        2+streamline__get_transaction_results_history_mainnet_17
        2+streamline__get_transaction_results_history_mainnet_16
        2+streamline__get_transaction_results_history_mainnet_18
        2+streamline__get_transaction_results_history_mainnet_19
        2+streamline__get_transaction_results_history_mainnet_22
        --vars '{"STREAMLINE_INVOKE_STREAMS": True}'
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: >
          dbt run -s
          2+streamline__get_transactions_history_mainnet_18
          2+streamline__get_transactions_history_mainnet_19
          2+streamline__get_transaction_results_history_mainnet_14
          2+streamline__get_transaction_results_history_mainnet_15
          2+streamline__get_transaction_results_history_mainnet_17
          2+streamline__get_transaction_results_history_mainnet_16
          2+streamline__get_transaction_results_history_mainnet_18
          2+streamline__get_transaction_results_history_mainnet_19
          2+streamline__get_transaction_results_history_mainnet_22
          --vars '{"STREAMLINE_INVOKE_STREAMS": True}'
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target

@@ -22,16 +22,35 @@ env:
concurrency:
  group: ${{ github.workflow }}
jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@AN-4374/upgrade-dbt-1.7
    with:
      dbt_command: >
        dbt run -m silver__allday_moments_metadata_needed_s.sql;
        dbt run-operation get_allday_metadata;
        dbt run -m silver__nft_allday_metadata_s;
        dbt run -s tag:topshot;
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: >
          dbt run -m silver__allday_moments_metadata_needed_s.sql;
          dbt run-operation get_allday_metadata;
          dbt run -m silver__nft_allday_metadata_s;
          dbt run -s tag:topshot;
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target

@@ -24,15 +24,34 @@ env:
concurrency:
  group: ${{ github.workflow }}
jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@AN-4374/upgrade-dbt-1.7
    with:
      dbt_command: >
        dbt run-operation stage_external_sources --vars "ext_full_refresh: true";
        dbt seed;
        dbt run -s tag:scheduled_core tag:streamline_complete "flow_models,models/gold" --vars '{"STREAMLINE_START_BLOCK": ${{ vars.STREAMLINE_START_BLOCK }}}'
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: >
          dbt run-operation stage_external_sources --vars "ext_full_refresh: true";
          dbt seed;
          dbt run -s tag:scheduled_core tag:streamline_complete "flow_models,models/gold" --vars '{"STREAMLINE_START_BLOCK": ${{ vars.STREAMLINE_START_BLOCK }}}'
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target

@@ -22,15 +22,35 @@ env:
concurrency:
  group: ${{ github.workflow }}
jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@AN-4374/upgrade-dbt-1.7
    with:
      dbt_command: >
        dbt run-operation stage_external_sources --vars "ext_full_refresh: true";
        dbt seed;
        dbt run -s tag:scheduled_non_core --vars '{"STREAMLINE_START_BLOCK": ${{ vars.STREAMLINE_START_BLOCK }}}'
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: >
          dbt run-operation stage_external_sources --vars "ext_full_refresh: true";
          dbt seed;
          dbt run -s tag:scheduled_non_core --vars '{"STREAMLINE_START_BLOCK": ${{ vars.STREAMLINE_START_BLOCK }}}'
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target

@@ -36,10 +36,11 @@ jobs:
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip3 install dbt-snowflake==${{ env.DBT_VERSION }} cli_passthrough requests click
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
@@ -50,3 +51,12 @@ jobs:
      - name: Log test results
        run: |
          python python/dbt_test_alert.py
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target