Mirror of https://github.com/FlipsideCrypto/boba-models.git (synced 2026-02-06 09:31:59 +00:00)

Initial commit (27297a9538)

46 .github/workflows/dbt_alter_gha_task.yml vendored Normal file
@@ -0,0 +1,46 @@
name: dbt_alter_gha_task
run-name: dbt_alter_gha_task

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      workflow_name:
        type: string
        description: Name of the workflow to perform the action on, no .yml extension
        required: true
      task_action:
        type: choice
        description: Action to perform
        required: true
        options:
          - SUSPEND
          - RESUME
        default: SUSPEND

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_alter_gha_tasks.yml@AN-4374/upgrade-dbt-1.7
    with:
      workflow_name: |
        ${{ inputs.workflow_name }}
      task_action: |
        ${{ inputs.task_action }}
      environment: workflow_prod
    secrets: inherit

45 .github/workflows/dbt_deploy_new_workflows.yml vendored Normal file
@@ -0,0 +1,45 @@
name: dbt_deploy_new_workflows
run-name: dbt_deploy_new_workflows

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Deploy New Github Actions
        run: |
          make deploy_new_github_action DBT_TARGET=prod

76 .github/workflows/dbt_docs_update.yml vendored Normal file
@@ -0,0 +1,76 @@
name: docs_update

on:
  push:
    branches:
      - "main"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: refresh ddl for datashare
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;

      - name: checkout docs branch
        run: |
          git checkout -B docs origin/main

      - name: generate dbt docs
        run: dbt docs generate -t prod

      - name: move files to docs directory
        run: |
          mkdir -p ./docs
          cp target/{catalog.json,manifest.json,index.html} docs/

      - name: clean up target directory
        run: dbt clean

      - name: check for changes
        run: git status

      - name: stage changed files
        run: git add .

      - name: commit changed files
        run: |
          git config user.email "abc@xyz"
          git config user.name "github-actions"
          git commit -am "Auto-update docs"

      - name: push changes to docs
        run: |
          git push -f --set-upstream origin docs

17 .github/workflows/dbt_integration_test.yml vendored Normal file
@@ -0,0 +1,17 @@
name: dbt_run_integration_test
run-name: ${{ github.event.inputs.branch }}

on:
  workflow_dispatch:

concurrency: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
    with:
      command: >
        dbt test --selector 'integration_tests'
      environment: ${{ github.ref == 'refs/heads/main' && 'workflow_prod' || 'workflow_dev' }}
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit

66 .github/workflows/dbt_run_adhoc.yml vendored Normal file
@@ -0,0 +1,66 @@
name: dbt_run_adhoc
run-name: ${{ inputs.dbt_command }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      environment:
        type: choice
        description: DBT Run Environment
        required: true
        options:
          - dev
          - prod
        default: dev
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: string
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.warehouse }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_${{ inputs.environment }}

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}

67 .github/workflows/dbt_run_deployment.yml vendored Normal file
@@ -0,0 +1,67 @@
name: dbt_run_deployment
run-name: ${{ inputs.dbt_command }}

on:
  workflow_dispatch:
    branches:
      - "main"
    inputs:
      warehouse:
        type: choice
        description: Snowflake warehouse
        required: true
        options:
          - DBT
          - DBT_CLOUD
          - DBT_EMERGENCY
        default: DBT
      dbt_command:
        type: string
        description: 'DBT Run Command'
        required: true

env:
  DBT_PROFILES_DIR: ./
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ inputs.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  dbt:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          ${{ inputs.dbt_command }}
      - name: Run datashare model
        run: |
          cnt=$(dbt ls -m fsc_utils.datashare._datashare___create_gold | wc -l ); if [ $cnt -eq 1 ]; then dbt run -m fsc_utils.datashare._datashare___create_gold; fi;
          dbt run-operation run_query --args "{sql: call admin.datashare.sp_grant_share_permissions('${{ env.DATABASE }}')}"
      - name: Store logs
        uses: actions/upload-artifact@v3
        with:
          name: dbt-logs
          path: |
            logs
            target

68 .github/workflows/dbt_run_dev_refresh.yml vendored Normal file
@@ -0,0 +1,68 @@
name: dbt_run_dev_refresh
run-name: dbt_run_dev_refresh

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs_refresh:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run Dev Refresh
        run: |
          dbt run-operation fsc_evm.run_sp_create_prod_clone

  run_dbt_jobs_udfs:
    runs-on: ubuntu-latest
    needs: run_dbt_jobs_refresh
    environment:
      name: workflow_dev

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Recreate UDFs
        run: |
          dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev
          dbt run -s livequery_models.deploy.core._live --vars '{"UPDATE_UDFS_AND_SPS":True}' -t dev

48 .github/workflows/dbt_run_full_observability.yml vendored Normal file
@@ -0,0 +1,48 @@
name: dbt_run_full_observability
run-name: dbt_run_full_observability

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod_2xl

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Observability Models
        run: |
          dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m "fsc_evm,tag:observability"

45 .github/workflows/dbt_run_heal_models.yml vendored Normal file
@@ -0,0 +1,45 @@
name: dbt_run_heal_models
run-name: dbt_run_heal_models

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Heal Models
        run: |
          dbt run -m "<evm_chain>_models,tag:heal" --vars '{"HEAL_MODEL":True}'

51 .github/workflows/dbt_run_operation_reorg.yml vendored Normal file
@@ -0,0 +1,51 @@
name: dbt_run_operation_reorg
run-name: dbt_run_operation_reorg

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: List reorg models
        id: list_models
        run: |
          reorg_model_list=$(dbt list --select "<evm_chain>_models,tag:reorg" --resource-type model --output name | grep '__' | awk -F'.' '{print $NF}' | tr '\n' ',' | sed 's/,$//')
          echo "model_list=$reorg_model_list" >> $GITHUB_OUTPUT

      - name: Execute block_reorg macro
        run: |
          dbt run-operation fsc_utils.block_reorg --args "{reorg_model_list: '${{ steps.list_models.outputs.model_list }}', hours: '12'}" && awk '/SQL status/ {print; next} /DELETE FROM/{getline; print} /\/\* {/ {print}' logs/dbt.log
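
The `List reorg models` step above turns `dbt list` output into the comma-separated string that the `block_reorg` macro expects. A worked sketch of the pipeline, using assumed model names (the real list depends on which models carry the `reorg` tag in this project):

```
-- dbt list --output name emits one fully qualified name per line, e.g.:
--   silver__logs
--   silver__traces
-- grep '__' keeps only names following the double-underscore convention;
-- awk -F'.' '{print $NF}' keeps the part after the last dot for package-qualified names;
-- tr '\n' ',' joins the lines with commas, and sed strips the trailing comma:
--   silver__logs,silver__traces
-- which is what block_reorg receives as reorg_model_list
```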

45 .github/workflows/dbt_run_scheduled_abis.yml vendored Normal file
@@ -0,0 +1,45 @@
name: dbt_run_scheduled_abis
run-name: dbt_run_scheduled_abis

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run ABI Models
        run: |
          dbt run -m "fsc_evm,tag:silver_abis" "fsc_evm,tag:gold_abis"

45 .github/workflows/dbt_run_scheduled_curated.yml vendored Normal file
@@ -0,0 +1,45 @@
name: dbt_run_scheduled_curated
run-name: dbt_run_scheduled_curated

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Curated Models
        run: |
          dbt run -m "<evm_chain>_models,tag:curated" "fsc_evm,tag:curated"

49 .github/workflows/dbt_run_scheduled_main.yml vendored Normal file
@@ -0,0 +1,49 @@
name: dbt_run_scheduled_main
run-name: dbt_run_scheduled_main

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Main Models
        run: |
          dbt run -m "fsc_evm,tag:silver_core" "fsc_evm,tag:gold_core" "fsc_evm,tag:silver_prices" "fsc_evm,tag:gold_prices" "fsc_evm,tag:silver_labels" "fsc_evm,tag:gold_labels"

      - name: Run Streamline Models
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_decoded_logs_realtime" "fsc_evm,tag:streamline_decoded_logs_complete"

49 .github/workflows/dbt_run_streamline_chainhead.yml vendored Normal file
@@ -0,0 +1,49 @@
name: dbt_run_streamline_chainhead
run-name: dbt_run_streamline_chainhead

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run Chainhead Models
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" "fsc_evm,tag:streamline_core_complete_receipts" "fsc_evm,tag:streamline_core_realtime_receipts"

      - name: Run Chainhead Tests
        run: |
          dbt test -m "fsc_evm,tag:chainhead"

44 .github/workflows/dbt_run_streamline_decoder.yml vendored Normal file
@@ -0,0 +1,44 @@
name: dbt_run_streamline_decoder
run-name: dbt_run_streamline_decoder

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m "fsc_evm,tag:decoded_logs"

44 .github/workflows/dbt_run_streamline_decoder_history.yml vendored Normal file
@@ -0,0 +1,44 @@
name: dbt_run_streamline_decoder_history
run-name: dbt_run_streamline_decoder_history

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "fsc_evm,tag:streamline_decoded_logs_complete" "fsc_evm,tag:streamline_decoded_logs_history"

45 .github/workflows/dbt_run_streamline_history.yml vendored Normal file
@@ -0,0 +1,45 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Run History Models
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_history" "fsc_evm,tag:streamline_core_complete_receipts" "fsc_evm,tag:streamline_core_history_receipts"

49 .github/workflows/dbt_test_daily.yml vendored Normal file
@@ -0,0 +1,49 @@
name: dbt_test_daily
run-name: dbt_test_daily

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Build Daily Testing Views
        run: |
          dbt run -m "fsc_evm,tag:daily_test"

      - name: Run Daily Tests
        run: |
          dbt test -m "fsc_evm,tag:daily_test"

49 .github/workflows/dbt_test_intraday.yml vendored Normal file
@@ -0,0 +1,49 @@
name: dbt_test_intraday
run-name: dbt_test_intraday

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Build Recent Testing Views
        run: |
          dbt run -m "fsc_evm,tag:recent_test"

      - name: Run Recent Tests
        run: |
          dbt test -m "fsc_evm,tag:recent_test"

49 .github/workflows/dbt_test_monthly.yml vendored Normal file
@@ -0,0 +1,49 @@
name: dbt_test_monthly
run-name: dbt_test_monthly

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_test

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"

      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps

      - name: Build Full Testing Views
        run: |
          dbt run -m "fsc_evm,tag:full_test"

      - name: Run Full Tests
        run: |
          dbt test -m "fsc_evm,tag:full_test"

27 .github/workflows/dbt_test_tasks.yml vendored Normal file
@@ -0,0 +1,27 @@
name: dbt_test_tasks
run-name: dbt_test_tasks

on:
  workflow_dispatch:
    branches:
      - "main"

env:
  DBT_PROFILES_DIR: ./

  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_test_tasks.yml@AN-4374/upgrade-dbt-1.7
    secrets: inherit

20 .gitignore vendored Normal file
@@ -0,0 +1,20 @@

target/
dbt_modules/
# newer versions of dbt use this directory instead of dbt_modules for test dependencies
dbt_packages/
logs/

.venv/
.python-version

# Visual Studio Code files
*/.vscode
*.code-workspace
.history/
**/.DS_Store
.vscode/
.env
dbt-env/

package-lock.yml

21 LICENSE Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 Flipside Crypto

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

145 README.md Normal file
@@ -0,0 +1,145 @@
## Repo Set Up
1. Create a new repository from the [evm-models-template](https://github.com/FlipsideCrypto/evm-models-template)
2. Update all references to `<evm_chain>` to the new chain name, in lowercase, using find and replace
3. Update the fsc-evm package version in `packages.yml` to the latest version
4. Set up the rest of the dbt project, where applicable, including but not limited to:
- `dbt_project.yml` (enable/disable packages, vars, etc.)
- `.github/workflows` (update tags, etc.)
- `github_actions__workflows.csv` (update schedule, workflows, etc.)
- `overview.md` (update `<evm_chain>`, table references, docs, etc.)
- `sources.yml` (update schemas, tables, etc.)
- `requirements.txt` (update dependencies)
- other files where applicable

## Profile Set Up

#### Use the following within profiles.yml
----

```yml
<chain>: # replace <chain>/<CHAIN> with the chain name, then remove this comment from your yml
  target: dev
  outputs:
    dev:
      type: snowflake
      account: <ACCOUNT>
      role: INTERNAL_DEV
      user: <USERNAME>
      authenticator: externalbrowser
      region: us-east-1
      database: <CHAIN>_DEV
      warehouse: DBT
      schema: silver
      threads: 4
      client_session_keep_alive: False
      query_tag: dbt_<USERNAME>_dev

    prod:
      type: snowflake
      account: <ACCOUNT>
      role: DBT_CLOUD_<CHAIN>
      user: <USERNAME>
      authenticator: externalbrowser
      region: us-east-1
      database: <CHAIN>
      warehouse: DBT_CLOUD_<CHAIN>
      schema: silver
      threads: 4
      client_session_keep_alive: False
      query_tag: dbt_<USERNAME>_dev
```

### Common DBT Run Variables

The following variables can be used to control various aspects of the dbt run. Use them with the `--vars` flag when running dbt commands.

| Variable | Description | Example Usage |
|----------|-------------|---------------|
| `UPDATE_UDFS_AND_SPS` | Update User Defined Functions and Stored Procedures. By default, this is set to False. | `--vars '{"UPDATE_UDFS_AND_SPS":true}'` |
| `STREAMLINE_INVOKE_STREAMS` | Invoke Streamline processes. By default, this is set to False. | `--vars '{"STREAMLINE_INVOKE_STREAMS":true}'` |
| `STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES` | Use the development environment for external tables. By default, this is set to False. | `--vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}'` |
| `HEAL_CURATED_MODEL` | Heal specific curated models. By default, this is set to an empty array []. See more below. | `--vars '{"HEAL_CURATED_MODEL":["axelar","across","celer_cbridge"]}'` |
| `UPDATE_SNOWFLAKE_TAGS` | Control updating of Snowflake tags. By default, this is set to True (see `dbt_project.yml`). | `--vars '{"UPDATE_SNOWFLAKE_TAGS":false}'` |
| `START_GHA_TASKS` | Start GitHub Actions tasks. By default, this is set to False. | `--vars '{"START_GHA_TASKS":true}'` |

#### Example Commands

1. Update UDFs and SPs:
```
dbt run --vars '{"UPDATE_UDFS_AND_SPS":true}' -m ...
```

2. Invoke Streamline and use dev for external tables:
```
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":true,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -m ...
```

3. Heal specific curated models:
```
dbt run --vars '{"HEAL_CURATED_MODEL":["axelar","across","celer_cbridge"]}' -m ...
```

4. Update Snowflake tags for a specific model:
```
dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS":true}' -s models/silver/utilities/silver__number_sequence.sql
```

5. Start GHA tasks:
```
dbt seed -s github_actions__workflows && dbt run -m models/github_actions --full-refresh && dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}'
```

6. Using two or more variables:
```
dbt run --vars '{"UPDATE_UDFS_AND_SPS":true,"STREAMLINE_INVOKE_STREAMS":true,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -m ...
```

> Note: Replace `-m ...` with appropriate model selections or tags as needed for your project structure.

## FSC_EVM

`fsc_evm` is a collection of macros, models, and other resources that are used to build the Flipside Crypto EVM models.

For more information on the `fsc_evm` package, see the [FSC_EVM Wiki](https://github.com/FlipsideCrypto/fsc-evm/wiki).

## Applying Model Tags

### Database / Schema level tags

Database and schema tags are applied via the `fsc_evm.add_database_or_schema_tags` macro. These tags are inherited by their downstream objects. To add/modify tags, call the appropriate tag set function within the macro.

```
{{ fsc_evm.set_database_tag_value('SOME_DATABASE_TAG_KEY','SOME_DATABASE_TAG_VALUE') }}
{{ fsc_evm.set_schema_tag_value('SOME_SCHEMA_TAG_KEY','SOME_SCHEMA_TAG_VALUE') }}
```

### Model tags

To add/update a model's Snowflake tags, add/modify the `meta` model property under `config`. Only table-level tags are supported at this time via dbt.

{% raw %}
```
{{ config(
    ...,
    meta={
        'database_tags':{
            'table': {
                'PURPOSE': 'SOME_PURPOSE'
            }
        }
    },
    ...
) }}
```
{% endraw %}

By default, model tags are pushed to Snowflake on each load. You can disable this by setting the `UPDATE_SNOWFLAKE_TAGS` project variable to `False` during a run.

```
dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS":False}' -s models/silver/utilities/silver__number_sequence.sql
```

### Querying for existing tags on a model in Snowflake

```
select *
from table(<chain>.information_schema.tag_references('<chain>.core.fact_blocks', 'table'));
```

0 analysis/.gitkeep Normal file

0 data/.gitkeep Normal file

3 data/github_actions__workflows.csv Normal file
@@ -0,0 +1,3 @@
workflow_name,workflow_schedule
dbt_run_streamline_chainhead,"<insert-cron-schedule>"
dbt_run_scheduled_main,"<insert-cron-schedule>"

126 dbt_project.yml Normal file
@@ -0,0 +1,126 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "<evm_chain>_models" # replace with the name of the chain
version: "1.0.0"
config-version: 2

# This setting configures which "profile" dbt uses for this project.
profile: "<evm_chain>" # replace with the name of the chain

# These configurations specify where dbt should look for different types of files.
# The `source-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
model-paths: ["models"]
analysis-paths: ["analysis"]
test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths: ["dbt_packages/fsc_evm/doc_descriptions", "models/doc_descriptions", "models"]

target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
  - "target"
  - "dbt_modules"
  - "dbt_packages"

tests:
  <evm_chain>_models: # replace with the name of the chain
    +store_failures: true # all tests
  fsc_evm:
    +store_failures: true

on-run-start:
  - "{{ fsc_evm.create_sps() }}"
  - "{{ fsc_evm.create_udfs() }}"

on-run-end:
  - '{{ fsc_evm.apply_meta_as_tags(results) }}'

dispatch:
  - macro_namespace: dbt
    search_order:
      - <evm_chain>-models
      - dbt_snowflake_query_tags
      - dbt

query-comment:
  comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
  append: true # Snowflake removes prefixed comments.

# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models

models:
  <evm_chain>_models: # replace with the name of the chain
    +copy_grants: true
    +persist_docs:
      relation: true
      columns: true
    +on_schema_change: "append_new_columns"
  livequery_models:
    +materialized: ephemeral
  fsc_evm:
    +enabled: false # disable fsc_evm package by default
    +copy_grants: true
    +persist_docs:
      relation: true
      columns: true
    +on_schema_change: "append_new_columns"
    main_package:
      +enabled: false # disable main_package by default, enable other packages as needed
      core:
        +enabled: true # enable subpackages, as needed
      github_actions:
        +enabled: true
      labels:
        +enabled: true
      prices:
        +enabled: true
      utils:
        +enabled: true

vars:
  "dbt_date:time_zone": GMT
  STREAMLINE_INVOKE_STREAMS: False
  STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
  UPDATE_UDFS_AND_SPS: False
  UPDATE_SNOWFLAKE_TAGS: True
  OBSERV_FULL_TEST: False
  WAIT: 0
  HEAL_MODEL: False
  HEAL_MODELS: []
  START_GHA_TASKS: False

  #### STREAMLINE 2.0 BEGIN ####

  API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
  EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
  ROLES: |
    ["INTERNAL_DEV"]

  config:
    # The keys correspond to dbt profiles and are case sensitive
    dev:
      API_INTEGRATION:
      EXTERNAL_FUNCTION_URI:
      ROLES:
        - AWS_LAMBDA_<EVM_CHAIN>_API # replace with the name of the chain
        - INTERNAL_DEV

    prod:
      API_INTEGRATION:
      EXTERNAL_FUNCTION_URI:
      ROLES:
        - AWS_LAMBDA_<EVM_CHAIN>_API # replace with the name of the chain
        - INTERNAL_DEV
        - DBT_CLOUD_<EVM_CHAIN> # replace with the name of the chain

  #### STREAMLINE 2.0 END ####

  #### FSC_EVM BEGIN ####

  # Please visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables

  #### FSC_EVM END ####
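
The `API_INTEGRATION` and `EXTERNAL_FUNCTION_URI` vars resolve per dbt target, with the `dev` block as a fallback. A sketch of the resolution logic (the concrete values are left blank in this commit, so the lookups below are illustrative only):

```
-- -t dev   -> var("config")["dev"]["API_INTEGRATION"]
-- -t prod  -> var("config")["prod"]["API_INTEGRATION"]
-- -t sbx   -> var("config")["sbx"] is undefined (falsy in Jinja), so the dev block is used
-- ("sbx" is a hypothetical target name used only to illustrate the fallback)
```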

11 macros/custom_naming_macros.sql Normal file
@@ -0,0 +1,11 @@
{% macro generate_schema_name(custom_schema_name=none, node=none) -%}
    {% set node_name = node.name %}
    {% set split_name = node_name.split('__') %}
    {{ split_name[0] | trim }}
{%- endmacro %}

{% macro generate_alias_name(custom_alias_name=none, node=none) -%}
    {% set node_name = node.name %}
    {% set split_name = node_name.split('__') %}
    {{ split_name[1] | trim }}
{%- endmacro %}
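
Together these overrides make the double-underscore model naming convention drive both schema and alias: the text before `__` becomes the schema, the text after becomes the relation name. A quick sketch of the mapping, using the `silver__number_sequence` model referenced in the README:

```
-- node.name = 'silver__number_sequence'
-- generate_schema_name(...) -> 'silver'           (split_name[0])
-- generate_alias_name(...)  -> 'number_sequence'  (split_name[1])
-- so the model builds as <database>.silver.number_sequence
```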

44 macros/dbt/get_merge_sql.sql Normal file
@@ -0,0 +1,44 @@
{% macro get_merge_sql(
        target,
        source,
        unique_key,
        dest_columns,
        incremental_predicates
    ) -%}
    {% set predicate_override = "" %}
    {% if incremental_predicates [0] == "dynamic_range" %}
        -- run some queries to dynamically determine the min + max of this 'input_column' in the new data
        {% set input_column = incremental_predicates [1] %}
        {% set get_limits_query %}
            SELECT
                MIN(
                    {{ input_column }}
                ) AS lower_limit,
                MAX(
                    {{ input_column }}
                ) AS upper_limit
            FROM
                {{ source }}
        {% endset %}
        {% set limits = run_query(get_limits_query) [0] %}
        {% set lower_limit,
            upper_limit = limits [0],
            limits [1] %}
        -- use those calculated min + max values to limit 'target' scan, to only the days with new data
        {% set predicate_override %}
            dbt_internal_dest.{{ input_column }} BETWEEN '{{ lower_limit }}'
            AND '{{ upper_limit }}' {% endset %}
    {% endif %}

    {% set predicates = [predicate_override] if predicate_override else incremental_predicates %}
    -- standard merge from here
    {% set merge_sql = dbt.get_merge_sql(
        target,
        source,
        unique_key,
        dest_columns,
        predicates
    ) %}
    {{ return(merge_sql) }}
{% endmacro %}
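
This override only changes merge behavior when a model passes `dynamic_range` as the first incremental predicate; otherwise the predicates flow through to dbt's standard merge untouched. A minimal sketch of a model that would exercise the `dynamic_range` branch; the model, column, and ref names here are assumptions for illustration, not part of this commit:

```
{{ config(
    materialized = 'incremental',
    unique_key = 'block_number',
    incremental_predicates = ['dynamic_range', 'block_timestamp::date']
) }}

SELECT
    block_number,
    block_timestamp
FROM
    {{ ref('silver__blocks') }}

{% if is_incremental() %}
WHERE
    block_timestamp >= (
        SELECT
            MAX(block_timestamp)
        FROM
            {{ this }}
    )
{% endif %}

-- at merge time, the macro computes MIN/MAX of block_timestamp::date over the new
-- rows and restricts the scan of dbt_internal_dest to that range instead of the full table
```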

8 macros/dbt/get_tmp_relation_type.sql Normal file
@@ -0,0 +1,8 @@
{% macro dbt_snowflake_get_tmp_relation_type(
        strategy,
        unique_key,
        language
    ) %}
    -- always table
    {{ return('table') }}
{% endmacro %}

44 makefile Normal file
@@ -0,0 +1,44 @@
DBT_TARGET ?= dev

deploy_streamline_functions:
	rm -f package-lock.yml && dbt clean && dbt deps
	dbt run -s livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)
	dbt run-operation fsc_utils.create_evm_streamline_udfs --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)

cleanup_time:
	rm -f package-lock.yml && dbt clean && dbt deps

deploy_streamline_tables:
	rm -f package-lock.yml && dbt clean && dbt deps
ifeq ($(findstring dev,$(DBT_TARGET)),dev)
	dbt run -m "fsc_evm,tag:bronze_external" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":True}' -t $(DBT_TARGET)
else
	dbt run -m "fsc_evm,tag:bronze_external" -t $(DBT_TARGET)
endif
	dbt run -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" "fsc_evm,tag:utils" --full-refresh -t $(DBT_TARGET)

deploy_streamline_requests:
	rm -f package-lock.yml && dbt clean && dbt deps
	dbt run -m "fsc_evm,tag:streamline_core_complete" "fsc_evm,tag:streamline_core_realtime" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET)

deploy_github_actions:
	dbt run -s livequery_models.deploy.marketplace.github --vars '{"UPDATE_UDFS_AND_SPS":True}' -t $(DBT_TARGET)
	dbt seed -s github_actions__workflows -t $(DBT_TARGET)
	dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET)
ifeq ($(findstring dev,$(DBT_TARGET)),dev)
	dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET)
else
	dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}' -t $(DBT_TARGET)
endif

deploy_new_github_action:
	dbt run-operation fsc_evm.drop_github_actions_schema -t $(DBT_TARGET)
	dbt seed -s github_actions__workflows -t $(DBT_TARGET)
	dbt run -m "fsc_evm,tag:gha_tasks" --full-refresh -t $(DBT_TARGET)
ifeq ($(findstring dev,$(DBT_TARGET)),dev)
	dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":False}' -t $(DBT_TARGET)
else
	dbt run-operation fsc_utils.create_gha_tasks --vars '{"START_GHA_TASKS":True}' -t $(DBT_TARGET)
endif

.PHONY: deploy_streamline_functions deploy_streamline_tables deploy_streamline_requests deploy_github_actions cleanup_time deploy_new_github_action

81 models/__overview__.md Normal file
@@ -0,0 +1,81 @@
{% docs __overview__ %}

# Welcome to the Flipside Crypto Core Models Documentation!

## **What does this documentation cover?**
The documentation included here details the design of the Core tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/<evm_chain>-models)

## **How do I use these docs?**
The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information.

If you are experienced with dbt docs, feel free to use the sidebar to navigate the documentation, as well as explore the relationships between tables and the logic building them.

There is more information on how to use dbt docs in the last section of this document.

## **Quick Links to Table Documentation**

**Click on the links below to jump to the documentation for each schema.**

### Core Tables (<evm_chain>.core)

**Dimension Tables:**
- [dim_labels](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__dim_labels)

**Fact Tables:**
- [fact_blocks](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__fact_blocks)
- [fact_event_logs](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__fact_event_logs)
- [fact_transactions](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__fact_transactions)
- [fact_traces](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.core__fact_traces)

### Price Tables (<evm_chain>.price)
- [dim_asset_metadata](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/<evm_chain>-models/#!/model/model.fsc_evm.price__ez_prices_hourly)

## **Helpful User-Defined Functions (UDFs)**

UDFs are custom functions built by the Flipside team that can be used in your queries to make your life easier.

Please visit [LiveQuery Functions Overview](https://flipsidecrypto.github.io/livequery-models/#!/overview) for a full list of helpful UDFs.

## **Data Model Overview**

The Core models are built a few different ways, but the core fact tables are built using three layers of sql models: **bronze, silver, and gold (or core).**

- Bronze: Data is loaded in from the source as a view
- Silver: All necessary parsing, filtering, de-duping, and other transformations are done here
- Gold (or Core): Final views and tables that are available publicly

The dimension tables are sourced from a variety of on-chain and off-chain sources.

Convenience views (denoted ez_) are a combination of different fact and dimension tables. These views are built to make it easier to query the data.

## **Using dbt docs**
### Navigation

You can use the ```Project``` and ```Database``` navigation tabs on the left side of the window to explore the models in the project.

### Database Tab

This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database.

### Graph Exploration

You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.

On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the Expand button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, or are built from, the model you're exploring.

Once expanded, you'll be able to use the ```--models``` and ```--exclude``` model selection syntax to filter the models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).

Note that you can also right-click on models to interactively filter and explore the graph.

### **More information**
- [Flipside](https://flipsidecrypto.xyz/)
- [Data Studio](https://flipsidecrypto.xyz/studio)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/<evm_chain>-models)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)

{% enddocs %}

46 models/sources.yml Normal file
@@ -0,0 +1,46 @@
version: 2

sources:
  - name: github_actions
    database: "{{ target.database }}"
    schema: github_actions
    tables:
      - name: workflows
  - name: bronze_streamline
    database: streamline
    schema: >-
      {{ var('GLOBAL_PROD_DB_NAME') ~ ('_dev' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else '') }}
    tables:
      - name: blocks
      - name: transactions
      - name: >-
          {{ 'receipts_by_hash' if var("GLOBAL_USES_RECEIPTS_BY_HASH", False) else 'receipts' }}
      - name: traces
      - name: confirm_blocks
      - name: decoded_logs
  - name: crosschain_silver
    database: "{{ 'crosschain' if target.database.upper() == var('GLOBAL_PROD_DB_NAME').upper() else 'crosschain_dev' }}"
    schema: silver
    tables:
      - name: labels_combined
      - name: complete_provider_asset_metadata
      - name: complete_native_asset_metadata
      - name: complete_native_prices
      - name: complete_provider_prices
      - name: complete_token_asset_metadata
      - name: complete_token_prices
  - name: bronze_api
    database: "{{ target.database }}"
    schema: bronze_api
    tables:
      - name: contract_abis
  - name: crosschain_public
    database: crosschain
    schema: bronze_public
    tables:
      - name: user_abis
  - name: silver
    database: "{{ target.database }}"
    schema: silver
    tables:
      - name: verified_abis
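
The Jinja on `bronze_streamline` flips the external-table schema between prod and dev. A sketch of how the source resolves, assuming `GLOBAL_PROD_DB_NAME` is `boba` for this repo (an assumption based on the repo name; the var is set in the fsc-evm package, not in this commit):

```
-- {{ source('bronze_streamline', 'blocks') }} compiles to:
--   STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES = False -> streamline.boba.blocks
--   STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES = True  -> streamline.boba_dev.blocks
```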

3 packages.yml Normal file
@@ -0,0 +1,3 @@
packages:
  - git: https://github.com/FlipsideCrypto/fsc-evm.git
    revision: <insert-version-v3+>

31 profiles.yml Normal file
@@ -0,0 +1,31 @@
<evm_chain>:
  target: prod
  outputs:
    dev:
      type: snowflake
      account: "{{ env_var('ACCOUNT') }}"
      role: "{{ env_var('ROLE') }}"
      user: "{{ env_var('USER') }}"
      password: "{{ env_var('PASSWORD') }}"
      region: "{{ env_var('REGION') }}"
      database: "{{ env_var('DATABASE') }}"
      warehouse: "{{ env_var('WAREHOUSE') }}"
      schema: SILVER
      threads: 4
      client_session_keep_alive: False
      query_tag: curator
    prod:
      type: snowflake
      account: "{{ env_var('ACCOUNT') }}"
      role: "{{ env_var('ROLE') }}"
      user: "{{ env_var('USER') }}"
      password: "{{ env_var('PASSWORD') }}"
      region: "{{ env_var('REGION') }}"
      database: "{{ env_var('DATABASE') }}"
      warehouse: "{{ env_var('WAREHOUSE') }}"
      schema: SILVER
      threads: 4
      client_session_keep_alive: False
      query_tag: curator
config:
  send_anonymous_usage_stats: False

2 requirements.txt Normal file
@@ -0,0 +1,2 @@
dbt-snowflake>=1.7,<1.8
protobuf==4.25.3

7 selectors.yml Normal file
@@ -0,0 +1,7 @@
selectors:
  - name: integration_tests
    description: "Selector for integration tests"
    definition:
      union:
        - method: fqn
          value: "livequery_models.deploy.core._utils"

0 snapshots/.gitkeep Normal file