Initial commit

Mike Stepanovic 2025-06-03 14:50:53 -06:00 committed by GitHub
commit 414792c651
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
31 changed files with 1390 additions and 0 deletions

.github/workflows/dbt_docs_update.yml vendored Normal file

@@ -0,0 +1,71 @@
name: docs_update
on:
push:
branches:
- "main"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: checkout docs branch
run: |
git checkout -B docs origin/main
- name: generate dbt docs
run: |
dbt ls -t prod
dbt docs generate --no-compile -t prod
- name: move files to docs directory
run: |
mkdir -p ./docs
cp target/{catalog.json,manifest.json,index.html} docs/
- name: clean up target directory
run: dbt clean
- name: check for changes
run: git status
- name: stage changed files
run: git add .
- name: commit changed files
run: |
git config user.email "abc@xyz"
git config user.name "github-actions"
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs

@@ -0,0 +1,34 @@
name: dbt_run_integration_test
run-name: ${{ inputs.environment }}
on:
workflow_dispatch:
inputs:
environment:
required: true
type: string
concurrency: ${{ github.workflow }}
jobs:
prepare_vars:
runs-on: ubuntu-latest
environment:
name: ${{ inputs.environment }}
outputs:
warehouse: ${{ steps.set_outputs.outputs.warehouse }}
steps:
- name: Set warehouse output
id: set_outputs
run: |
echo "warehouse=${{ vars.WAREHOUSE }}" >> $GITHUB_OUTPUT
called_workflow_template:
needs: prepare_vars
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt.yml@main
with:
command: >
dbt test --selector 'integration_tests'
environment: ${{ inputs.environment }}
warehouse: ${{ needs.prepare_vars.outputs.warehouse }}
secrets: inherit

.github/workflows/dbt_run_adhoc.yml vendored Normal file

@@ -0,0 +1,67 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
on:
workflow_dispatch:
inputs:
environment:
type: choice
description: DBT Run Environment
required: true
options:
- dev
- prod
default: dev
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: string
description: 'DBT Run Command'
required: true
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}
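Because this workflow only runs on `workflow_dispatch`, it can also be triggered from the command line. A sketch using the GitHub CLI, assuming `gh` is installed and authenticated (input values are illustrative):
```
gh workflow run dbt_run_adhoc \
  -f environment=dev \
  -f warehouse=DBT \
  -f dbt_command='dbt run -m tag:core'
```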

.github/workflows/dbt_run_core.yml vendored Normal file

@@ -0,0 +1,44 @@
name: dbt_run_core
run-name: dbt_run_core
on:
workflow_dispatch:
schedule:
- cron: "25,55 * * * *"
env:
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m <ibc_chain>,tag:core

@@ -0,0 +1,46 @@
name: dbt_run_dev_refresh
run-name: dbt_run_dev_refresh
on:
workflow_dispatch:
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: '0 9 * * *'
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run-operation run_sp_create_prod_clone

.github/workflows/dbt_run_noncore.yml vendored Normal file

@@ -0,0 +1,44 @@
name: dbt_run_noncore
run-name: dbt_run_noncore
on:
workflow_dispatch:
schedule:
- cron: "25,55 0/4 * * *"
env:
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m <ibc_chain>,tag:noncore

@@ -0,0 +1,45 @@
name: dbt_run_observability
run-name: dbt_run_observability
on:
workflow_dispatch:
schedule:
# Runs "At minute 0 past every 8th hour." (see https://crontab.guru)
- cron: '0 */8 * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -s tag:observability

@@ -0,0 +1,44 @@
name: dbt_run_streamline_blocks_tx_realtime
run-name: dbt_run_streamline_blocks_tx_realtime
on:
workflow_dispatch:
schedule:
- cron: "5,35 * * * *"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 2+models/streamline/silver/realtime/streamline__blocks_tx_realtime.sql;

@@ -0,0 +1,44 @@
name: dbt_run_streamline_transactions_realtime
run-name: dbt_run_streamline_transactions_realtime
on:
workflow_dispatch:
schedule:
- cron: "15,45 * * * *"
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/silver/realtime/streamline__transactions_realtime.sql;

.github/workflows/dbt_test_daily.yml vendored Normal file

@@ -0,0 +1,52 @@
name: dbt_test_daily
run-name: dbt_test_daily
on:
workflow_dispatch:
schedule:
- cron: "30 0 * * *"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
SLACK_WEBHOOK_URL: "${{ secrets.SLACK_WEBHOOK_URL }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test -m tag:test_recency tag:test_quality --vars '{"TEST_HOURS_THRESHOLD":24}'
continue-on-error: true
- name: Log test results
run: |
python python/dbt_test_alert.py

.github/workflows/dbt_test_monthly.yml vendored Normal file

@@ -0,0 +1,52 @@
name: dbt_test_monthly
run-name: dbt_test_monthly
on:
workflow_dispatch:
schedule:
- cron: "0 10 28 * *"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
SLACK_WEBHOOK_URL: "${{ secrets.SLACK_WEBHOOK_URL }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test -m tag:test_recency tag:test_quality --vars '{"TEST_HOURS_THRESHOLD":744}'
continue-on-error: true
- name: Log test results
run: |
python python/dbt_test_alert.py

.github/workflows/dbt_test_recency.yml vendored Normal file

@@ -0,0 +1,52 @@
name: dbt_test_recency
run-name: dbt_test_recency
on:
workflow_dispatch:
schedule:
- cron: "0 */2 * * *"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
SLACK_WEBHOOK_URL: "${{ secrets.SLACK_WEBHOOK_URL }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt source freshness && dbt test -m <ibc_chain>,tag:test_recency
continue-on-error: true
- name: Log test results
run: |
python python/dbt_test_alert.py

.github/workflows/dbt_test_weekly.yml vendored Normal file

@@ -0,0 +1,52 @@
name: dbt_test_weekly
run-name: dbt_test_weekly
on:
workflow_dispatch:
schedule:
- cron: "30 0 * * 1"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
SLACK_WEBHOOK_URL: "${{ secrets.SLACK_WEBHOOK_URL }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test -m tag:test_recency tag:test_quality --vars '{"TEST_HOURS_THRESHOLD":168}'
continue-on-error: true
- name: Log test results
run: |
python python/dbt_test_alert.py

.gitignore vendored Normal file

@@ -0,0 +1,20 @@
target/
dbt_modules/
# newer versions of dbt use this directory instead of dbt_modules for package dependencies
dbt_packages/
logs/
.venv/
.python-version
# Visual Studio Code files
*/.vscode
*.code-workspace
.history/
**/.DS_Store
.vscode/
.env
dbt-env/
package-lock.yml

LICENSE Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 Flipside Crypto
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md Normal file

@@ -0,0 +1,139 @@
## Repo Set Up
1. Create a new repository from the [ibc-models-template](https://github.com/FlipsideCrypto/ibc-models-template)
2. Update all references to `<ibc_chain>` to the new chain name, in lowercase, using find and replace (see the sketch after this list)
3. Update the fsc-ibc package version in `packages.yml` to the latest version
4. Set up the rest of the dbt project, where applicable, including but not limited to:
- `dbt_project.yml` (enable/disable packages, vars, etc.)
- `.github/workflows` (update tags, etc.)
- `overview.md` (update `<ibc_chain>`, table references, docs etc.)
- `sources.yml` (update schemas, tables etc.)
- `requirements.txt` (update dependencies)
- other files where applicable
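For step 2, a find-and-replace sketch, assuming a Unix-like shell with GNU `sed` and `osmosis` as a stand-in chain name:
```
# lowercase references
grep -rl '<ibc_chain>' --exclude-dir=.git . | xargs sed -i 's/<ibc_chain>/osmosis/g'
# uppercase references (e.g. in dbt_project.yml)
grep -rl '<IBC_CHAIN>' --exclude-dir=.git . | xargs sed -i 's/<IBC_CHAIN>/OSMOSIS/g'
```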
## Profile Set Up
#### Use the following within profiles.yml
----
```yml
<chain>: # replace <chain>/<CHAIN> with the profile name for your chain, then remove this comment from your yml
target: dev
outputs:
dev:
type: snowflake
account: <ACCOUNT>
role: INTERNAL_DEV
user: <USERNAME>
authenticator: externalbrowser
region: us-east-1
database: <CHAIN>_DEV
warehouse: DBT
schema: silver
threads: 4
client_session_keep_alive: False
query_tag: dbt_<USERNAME>_dev
prod:
type: snowflake
account: <ACCOUNT>
role: DBT_CLOUD_<CHAIN>
user: <USERNAME>
authenticator: externalbrowser
region: us-east-1
database: <CHAIN>
warehouse: DBT_CLOUD_<CHAIN>
schema: silver
threads: 4
client_session_keep_alive: False
query_tag: dbt_<USERNAME>_prod
```
### Common DBT Run Variables
The following variables can be used to control various aspects of the dbt run. Use them with the `--vars` flag when running dbt commands.
| Variable | Description | Example Usage |
|----------|-------------|---------------|
| `UPDATE_UDFS_AND_SPS` | Update User Defined Functions and Stored Procedures. By default, this is set to False | `--vars '{"UPDATE_UDFS_AND_SPS":true}'` |
| `STREAMLINE_INVOKE_STREAMS` | Invoke Streamline processes. By default, this is set to False | `--vars '{"STREAMLINE_INVOKE_STREAMS":true}'` |
| `STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES` | Use development environment for external tables. By default, this is set to False | `--vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}'` |
| `HEAL_CURATED_MODEL` | Heal specific curated models. By default, this is set to an empty array []. See more below. | `--vars '{"HEAL_CURATED_MODEL":["axelar","across","celer_cbridge"]}'` |
| `UPDATE_SNOWFLAKE_TAGS` | Control updating of Snowflake tags. By default, this is set to True | `--vars '{"UPDATE_SNOWFLAKE_TAGS":false}'` |
| `START_GHA_TASKS` | Start GitHub Actions tasks. By default, this is set to False | `--vars '{"START_GHA_TASKS":true}'` |
#### Example Commands
1. Update UDFs and SPs:
```
dbt run --vars '{"UPDATE_UDFS_AND_SPS":true}' -m ...
```
2. Invoke Streamline and use dev for external tables:
```
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":true,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -m ...
```
3. Update Snowflake tags for a specific model:
```
dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS":true}' -s models/silver/utilities/silver__number_sequence.sql
```
4. Start all GHA tasks: (coming soon!)
```
dbt seed -s github_actions__workflows && dbt run -m models/github_actions --full-refresh && dbt run-operation fsc_evm.create_gha_tasks --vars '{"START_GHA_TASKS":True}'
```
5. Using two or more variables:
```
dbt run --vars '{"UPDATE_UDFS_AND_SPS":true,"STREAMLINE_INVOKE_STREAMS":true,"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -m ...
```
> Note: Replace `-m ...` with appropriate model selections or tags as needed for your project structure.
## FSC_IBC
`fsc_ibc` is a collection of macros, models, and other resources that are used to build the Flipside Crypto IBC models.
For more information on the `fsc_ibc` package, see the [FSC_IBC Wiki](https://github.com/FlipsideCrypto/fsc-ibc/wiki).
## Applying Model Tags
### Database / Schema level tags
Database and schema tags are applied via the `fsc_ibc.add_database_or_schema_tags` macro. These tags are inherited by their downstream objects. To add/modify tags call the appropriate tag set function within the macro.
```
{{ fsc_ibc.set_database_tag_value('SOME_DATABASE_TAG_KEY','SOME_DATABASE_TAG_VALUE') }}
{{ fsc_ibc.set_schema_tag_value('SOME_SCHEMA_TAG_KEY','SOME_SCHEMA_TAG_VALUE') }}
```
### Model tags
To add or update a model's Snowflake tags, add or modify the `meta` model property under `config`. Only table-level tags are supported via dbt at this time.
```
{% raw %}
{{ config(
    ...,
    meta={
        'database_tags':{
            'table': {
                'PURPOSE': 'SOME_PURPOSE'
            }
        }
    },
    ...
) }}
{% endraw %}
```
By default, model tags are pushed to Snowflake on each load. You can disable this by setting the `UPDATE_SNOWFLAKE_TAGS` project variable to `False` during a run.
```
dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS":False}' -s models/silver/utilities/silver__number_sequence.sql
```
### Querying for existing tags on a model in snowflake
```
select *
from table(<chain>.information_schema.tag_references('<chain>.core.fact_blocks', 'table'));
```

analysis/.gitkeep Normal file

data/.gitkeep Normal file

dbt_project.yml Normal file

@@ -0,0 +1,128 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "<ibc_chain>_models" # replace with the name of the chain
version: "1.0.0"
config-version: 2
# This setting configures which "profile" dbt uses for this project.
profile: "<ibc_chain>" # replace with the name of the chain
# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
model-paths: ["models"]
analysis-paths: ["analysis"]
test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
docs-paths: ["dbt_packages/fsc_ibc/doc_descriptions", "models/doc_descriptions", "models"]
target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
- "target"
- "dbt_modules"
- "dbt_packages"
tests:
<ibc_chain>_models: # replace with the name of the chain
+store_failures: true # all tests
fsc_ibc:
+store_failures: true
on-run-start:
- "{{ fsc_ibc.create_sps() }}"
- "{{ fsc_ibc.create_udfs() }}"
on-run-end:
- '{{ fsc_ibc.apply_meta_as_tags(results) }}'
dispatch:
- macro_namespace: dbt
search_order:
- <ibc_chain>_models # replace with the name of the chain
- dbt_snowflake_query_tags
- dbt
query-comment:
comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
append: true # Snowflake removes prefixed comments.
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
models:
<ibc_chain>_models: # replace with the name of the chain
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
livequery_base:
+materialized: ephemeral
fsc_ibc:
+enabled: false # disable fsc_ibc package by default
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
main_package:
+enabled: false # disable main_package by default, enabled other packages as needed
admin:
+enabled: true
core:
+enabled: true # enable subpackages, as needed
github_actions:
+enabled: true
labels:
+enabled: true
observability:
+enabled: true
prices:
+enabled: true
utils:
+enabled: true
scores_package:
+enabled: false
vars:
"dbt_date:time_zone": GMT
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
UPDATE_UDFS_AND_SPS: False
UPDATE_SNOWFLAKE_TAGS: True
BRONZE_LOOKBACK_DAYS: '{{ env_var("BRONZE_LOOKBACK_DAYS", 3) }}'
#### STREAMLINE 2.0 BEGIN ####
API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
ROLES: |
["INTERNAL_DEV"]
config:
# The keys correspond to dbt profiles and are case sensitive
dev:
API_INTEGRATION: AWS_<IBC_CHAIN>_API_STG_V2
EXTERNAL_FUNCTION_URI: <insert_uri>.execute-api.us-east-1.amazonaws.com/stg/
ROLES:
- AWS_LAMBDA_<IBC_CHAIN>_API # replace with the name of the chain
- INTERNAL_DEV
prod:
API_INTEGRATION: AWS_<IBC_CHAIN>_API_PROD_V2
EXTERNAL_FUNCTION_URI: <insert_uri>.execute-api.us-east-1.amazonaws.com/prod/
ROLES:
- AWS_LAMBDA_<IBC_CHAIN>_API # replace with the name of the chain
- INTERNAL_DEV
- DBT_CLOUD_<IBC_CHAIN> # replace with the name of the chain
#### STREAMLINE 2.0 END ####
#### FSC_IBC BEGIN ####
# Please visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables
#### FSC_IBC END ####
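The `API_INTEGRATION` and `EXTERNAL_FUNCTION_URI` vars above resolve against the `config` mapping by dbt target name, falling back to the `dev` entry when the target has no key. A worked example of the resolution:
```
# target.name == 'dev'   ->  config['dev']['API_INTEGRATION']   ->  AWS_<IBC_CHAIN>_API_STG_V2
# target.name == 'prod'  ->  config['prod']['API_INTEGRATION']  ->  AWS_<IBC_CHAIN>_API_PROD_V2
# any other target name  ->  no matching key, so the 'dev' values are used
```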

@@ -0,0 +1,11 @@
{% macro generate_schema_name(custom_schema_name=none, node=none) -%}
{% set node_name = node.name %}
{% set split_name = node_name.split('__') %}
{{ split_name[0] | trim }}
{%- endmacro %}
{% macro generate_alias_name(custom_alias_name=none, node=none) -%}
{% set node_name = node.name %}
{% set split_name = node_name.split('__') %}
{{ split_name[1] | trim }}
{%- endmacro %}
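Together, these overrides derive a model's schema and table name from its two-part file name, split on the double underscore. For example (model name illustrative):
```
-- model file: core__fact_blocks.sql
-- schema:     core          (split_name[0])
-- alias:      fact_blocks   (split_name[1])
-- builds as:  <database>.core.fact_blocks
```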

@@ -0,0 +1,44 @@
{% macro get_merge_sql(
target,
source,
unique_key,
dest_columns,
incremental_predicates
) -%}
{% set predicate_override = "" %}
{% if incremental_predicates and incremental_predicates[0] == "dynamic_range" %}
-- run some queries to dynamically determine the min + max of this 'input_column' in the new data
{% set input_column = incremental_predicates[1] %}
{% set get_limits_query %}
SELECT
MIN(
{{ input_column }}
) AS lower_limit,
MAX(
{{ input_column }}
) AS upper_limit
FROM
{{ source }}
{% endset %}
{% set limits = run_query(get_limits_query)[0] %}
{% set lower_limit, upper_limit = limits[0], limits[1] %}
-- use those calculated min + max values to limit 'target' scan, to only the days with new data
{% set predicate_override %}
dbt_internal_dest.{{ input_column }} BETWEEN '{{ lower_limit }}'
AND '{{ upper_limit }}' {% endset %}
{% endif %}
{% set predicates = [predicate_override] if predicate_override else incremental_predicates %}
-- standard merge from here
{% set merge_sql = dbt.get_merge_sql(
target,
source,
unique_key,
dest_columns,
predicates
) %}
{{ return(merge_sql) }}
{% endmacro %}
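A model opts into the dynamic-range pruning by passing a two-element `incremental_predicates` list, which limits the merge's scan of the target table to the min/max range found in the new batch. A minimal config sketch, assuming an incremental model (`tx_id` and `block_timestamp` are illustrative names):
```
{{ config(
    materialized = 'incremental',
    unique_key = 'tx_id',
    incremental_predicates = ['dynamic_range', 'block_timestamp']
) }}
```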

@@ -0,0 +1,8 @@
{% macro dbt_snowflake_get_tmp_relation_type(
strategy,
unique_key,
language
) %}
-- always materialize the temporary relation as a table, never a view
{{ return('table') }}
{% endmacro %}

makefile Normal file

@@ -0,0 +1,66 @@
DBT_TARGET ?= dev
cleanup_time:
@set -e; \
rm -f package-lock.yml && dbt clean && dbt deps
deploy_livequery:
@set -e; \
dbt run-operation fsc_ibc.drop_livequery_schemas --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_ibc.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET)
deploy_chain_phase_1:
@set -e; \
dbt run -m livequery_models.deploy.core --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_ibc.livequery_grants --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
dbt run-operation fsc_utils.create_udfs --vars '{"UPDATE_UDFS_AND_SPS": true}' -t $(DBT_TARGET); \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_ibc,tag:phase_1" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_ibc,tag:chainhead"; \
dbt run -m "fsc_ibc,tag:streamline,tag:core,tag:complete" "fsc_ibc,tag:streamline,tag:core,tag:realtime" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True, "MAIN_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_ibc,tag:phase_1" --full-refresh --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "GLOBAL_STREAMLINE_FR_ENABLED": true}' -t $(DBT_TARGET); \
dbt test -m "fsc_ibc,tag:chainhead"; \
dbt run -m "fsc_ibc,tag:streamline,tag:core,tag:complete" "fsc_ibc,tag:streamline,tag:core,tag:realtime" --vars '{"MAIN_SL_NEW_BUILD_ENABLED": true, "STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_2"
deploy_chain_phase_2:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_ibc,tag:phase_2" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_ibc,tag:streamlines,tag:realtime" "fsc_ibc,tag:streamline,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_ibc,tag:phase_2" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_BRONZE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_ibc,tag:streamline,tag:realtime" "fsc_ibc,tag:streamline,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_3"
deploy_chain_phase_3:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_ibc,tag:phase_2" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_ibc,tag:phase_3" --full-refresh --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true, "GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_ibc,tag:silver" "fsc_ibc,tag:streamline,tag:realtime" "fsc_ibc,tag:streamline,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True, "DECODER_SL_TESTING_LIMIT": 500}' -t $(DBT_TARGET); \
else \
dbt run -m "fsc_ibc,tag:phase_2" -t $(DBT_TARGET); \
dbt run -m "fsc_ibc,tag:phase_3" --full-refresh --vars '{"GLOBAL_STREAMLINE_FR_ENABLED": true, "GLOBAL_SILVER_FR_ENABLED": true, "GLOBAL_GOLD_FR_ENABLED": true, "GLOBAL_NEW_BUILD_ENABLED": true}' -t $(DBT_TARGET); \
dbt run -m "fsc_ibc,tag:silver" "fsc_ibc,tag:streamline,tag:realtime" "fsc_ibc,tag:streamline,tag:complete" --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -t $(DBT_TARGET); \
fi; \
echo "# wait ~10 minutes"; \
echo "# run deploy_chain_phase_4"
deploy_chain_phase_4:
@set -e; \
if [ "$(DBT_TARGET)" != "prod" ]; then \
dbt run -m "fsc_ibc,tag:phase_3" --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES":true}' -t $(DBT_TARGET); \
dbt run -m "fsc_ibc,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
else \
dbt run -m "fsc_ibc,tag:phase_3" -t $(DBT_TARGET); \
dbt run -m "fsc_ibc,tag:phase_4" --full-refresh -t $(DBT_TARGET); \
fi
.PHONY: cleanup_time deploy_livequery deploy_chain_phase_1 deploy_chain_phase_2 deploy_chain_phase_3 deploy_chain_phase_4
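The deploy phases are meant to be run in order, with `DBT_TARGET` overriding the default `dev` target; for example:
```
make deploy_chain_phase_1 DBT_TARGET=prod
# wait ~10 minutes, then:
make deploy_chain_phase_2 DBT_TARGET=prod
```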

models/__overview__.md Normal file

@@ -0,0 +1,82 @@
{% docs __overview__ %}
# Welcome to the Flipside Crypto <ibc_chain> Models Documentation!
## **What does this documentation cover?**
The documentation included here details the design of the <ibc_chain> tables and views available via [Flipside Crypto](https://flipsidecrypto.xyz/). For more information on how these models are built, please see [the GitHub repository](https://github.com/FlipsideCrypto/<ibc_chain>-models).
## **How do I use these docs?**
The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information.
If you are experienced with dbt docs, feel free to use the sidebar to navigate the documentation, as well as explore the relationships between tables and the logic building them.
There is more information on how to use dbt docs in the last section of this document.
## **Quick Links to Table Documentation**
**Click on the links below to jump to the documentation for each schema.**
### Core Tables (<ibc_chain>.core)
**Dimension Tables:**
- [dim_labels](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.core__dim_labels)
**Fact Tables:**
- [fact_blocks](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.core__fact_blocks)
- [fact_msgs](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.core__fact_msgs)
- [fact_msg_attributes](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.core__fact_msg_attributes)
- [fact_transactions](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.core__fact_transactions)
- [fact_transfers](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.core__fact_transfers)
### Price Tables (<ibc_chain>.price)
- [dim_asset_metadata](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.price__dim_asset_metadata)
- [fact_prices_ohlc_hourly](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.price__fact_prices_ohlc_hourly)
- [ez_asset_metadata](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/<ibc_chain>/#!/model/model.<ibc_chain>_models.price__ez_prices_hourly)
## **Helpful User-Defined Functions (UDFs)**
UDFs are custom functions built by the Flipside team that can be used in your queries to make your life easier.
Please visit [LiveQuery Functions Overview](https://flipsidecrypto.github.io/livequery-models/#!/overview) for a full list of helpful UDFs.
## **Data Model Overview**
The Core models are built a few different ways, but the core fact tables are built using three layers of SQL models: **bronze, silver, and gold (or core).**
- Bronze: Data is loaded in from the source as a view
- Silver: All necessary parsing, filtering, de-duping, and other transformations are done here
- Gold (or Core): Final views and tables that are available publicly
The dimension tables are sourced from a variety of on-chain and off-chain sources.
Convenience views (denoted ez_) are a combination of different fact and dimension tables. These views are built to make it easier to query the data.
## **Using dbt docs**
### Navigation
You can use the ```Project``` and ```Database``` navigation tabs on the left side of the window to explore the models in the project.
### Database Tab
This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database.
### Graph Exploration
You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.
On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the Expand button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, or are built from, the model you're exploring.
Once expanded, you'll be able to use the ```--models``` and ```--exclude``` model selection syntax to filter the models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).
Note that you can also right-click on models to interactively filter and explore the graph.
### **More information**
- [Flipside](https://flipsidecrypto.xyz/)
- [Data Studio](https://flipsidecrypto.xyz/studio)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/<ibc_chain>-models)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)
{% enddocs %}

models/sources.yml Normal file

@@ -0,0 +1,51 @@
version: 2
sources:
- name: github_actions
database: "{{ target.database }}"
schema: github_actions
tables:
- name: workflows
- name: bronze_streamline
database: streamline
schema: |
{{ target.database.upper() | replace('_DEV', '') ~ '_DEV' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else target.database.upper() | replace('_DEV', '') }}
tables:
- name: blocks
- name: transactions
- name: tx_counts
- name: complete_streamline
database: "{{ target.database }}"
schema: streamline
tables:
- name: complete_contract_abis
- name: crosschain_silver
database: >-
{{ 'CROSSCHAIN_DEV' if '_DEV' in target.database.upper() else 'CROSSCHAIN' }}
schema: silver
tables:
- name: labels_combined
- name: number_sequence
- name: complete_provider_asset_metadata
- name: complete_native_asset_metadata
- name: complete_native_prices
- name: complete_provider_prices
- name: complete_token_asset_metadata
- name: complete_token_prices
- name: crosschain_public
database: crosschain
schema: bronze_public
tables:
- name: user_abis
- name: silver
database: "{{ target.database }}"
schema: silver
tables:
- name: verified_abis
- name: fsc_evm_admin
database: >-
{{ 'FSC_EVM_DEV' if '_DEV' in target.database.upper() else 'FSC_EVM' }}
schema: admin
tables:
- name: _master_keys
- name: rpc_node_logs
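The `bronze_streamline` schema expression strips any `_DEV` suffix from the target database and re-adds it only when `STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES` is set. A worked example with a hypothetical `OSMOSIS_DEV` target database:
```
# STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES = True   ->  streamline.OSMOSIS_DEV
# STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES = False  ->  streamline.OSMOSIS  (the default)
```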

packages.yml Normal file

@@ -0,0 +1,3 @@
packages:
- git: https://github.com/FlipsideCrypto/fsc-ibc.git
revision: <insert-version>

profiles.yml Normal file

@@ -0,0 +1,31 @@
<ibc_chain>:
target: prod
outputs:
dev:
type: snowflake
account: "{{ env_var('ACCOUNT') }}"
role: "{{ env_var('ROLE') }}"
user: "{{ env_var('USER') }}"
password: "{{ env_var('PASSWORD') }}"
region: "{{ env_var('REGION') }}"
database: "{{ env_var('DATABASE') }}"
warehouse: "{{ env_var('WAREHOUSE') }}"
schema: SILVER
threads: 4
client_session_keep_alive: False
query_tag: curator
prod:
type: snowflake
account: "{{ env_var('ACCOUNT') }}"
role: "{{ env_var('ROLE') }}"
user: "{{ env_var('USER') }}"
password: "{{ env_var('PASSWORD') }}"
region: "{{ env_var('REGION') }}"
database: "{{ env_var('DATABASE') }}"
warehouse: "{{ env_var('WAREHOUSE') }}"
schema: SILVER
threads: 4
client_session_keep_alive: False
query_tag: curator
config:
send_anonymous_usage_stats: False
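Both targets read their connection details from environment variables, so a local run only needs those exported first; a minimal sketch with placeholder values:
```
export DBT_PROFILES_DIR=./
export ACCOUNT=<account> ROLE=<role> USER=<user> PASSWORD=<password> \
       REGION=us-east-1 DATABASE=<database> WAREHOUSE=DBT
dbt debug --target dev   # verify the connection before running models
```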

python/dbt_test_alert.py Normal file

@@ -0,0 +1,130 @@
import datetime
import requests
import json
import sys
import os
def log_test_result():
"""Reads the run_results.json file and returns a dictionary of targeted test results"""
filepath = "target/run_results.json"
with open(filepath) as f:
run = json.load(f)
logs = []
messages = {
"fail": [],
"warn": []
}
test_count = 0
warn_count = 0
fail_count = 0
for test in run["results"]:
test_count += 1
if test["status"] != "pass":
logs.append(test)
message = f"{test['failures']} record failure(s) in {test['unique_id']}"
if test["status"] == "warn":
messages["warn"].append(message)
warn_count += 1
elif test["status"] == "fail":
messages["fail"].append(message)
fail_count += 1
dbt_test_result = {
"logs": logs,
"messages": messages,
"test_count": test_count,
"warn_count": warn_count,
"fail_count": fail_count,
"elapsed_time": str(datetime.timedelta(seconds=run["elapsed_time"]))
}
return dbt_test_result
def create_message(**kwargs):
messageBody = {
"text": f"Hey{' <!here>' if len(kwargs['messages']['fail']) > 0 else ''}, new DBT test results for :{os.environ.get('DATABASE').split('_DEV')[0]}: {os.environ.get('DATABASE')}",
"attachments": [
{
"color": kwargs["color"],
"fields": [
{
"title": "Total Tests Run",
"value": kwargs["test_count"],
"short": True
},
{
"title": "Total Time Elapsed",
"value": kwargs["elapsed_time"],
"short": True
},
{
"title": "Number of Unsuccessful Tests",
"value": f"Fail: {kwargs['fail_count']}, Warn: {kwargs['warn_count']}",
"short": True
},
{
"title": "Failed Tests:",
"value": "\n".join(kwargs["messages"]["fail"]) if len(kwargs["messages"]["fail"]) > 0 else "None :)",
"short": False
}
],
"actions": [
{
"type": "button",
"text": "View Warnings",
"style": "primary",
"url": "https://github.com/FlipsideCrypto/<ibc_chain>-models/actions",
"confirm": {
"title": f"{kwargs['warn_count']} Warnings",
"text": "\n".join(kwargs["messages"]["warn"]) if len(kwargs["messages"]["warn"]) > 0 else "None :)",
"ok_text": "Continue to GHA",
"dismiss_text": "Dismiss"
}
}
]
}
]
}
return messageBody
def send_alert(webhook_url):
"""Sends a message to a slack channel"""
url = webhook_url
data = log_test_result()
send_message = create_message(
fail_count=data["fail_count"],
warn_count=data["warn_count"],
test_count=data["test_count"],
messages=data["messages"],
elapsed_time=data["elapsed_time"],
color="#f44336" if data["fail_count"] > 0 else "#4CAF50"
)
requests.post(url, json=send_message)
# test config to continue on error in workflow, so we want to exit with a non-zero code if there are any failures
if data['fail_count'] > 0:
sys.exit(1)
if __name__ == '__main__':
webhook_url = os.environ.get("SLACK_WEBHOOK_URL")
data = log_test_result()
# Only send an alert if there are failures
if data['fail_count'] > 0:
send_alert(webhook_url)
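The script expects `target/run_results.json` from a prior `dbt test` and a `SLACK_WEBHOOK_URL` in the environment, mirroring the test workflows above; a local sketch:
```
dbt test -m tag:test_quality || true    # keep going even if tests fail
export SLACK_WEBHOOK_URL=<your_webhook_url>
python python/dbt_test_alert.py         # exits non-zero when any test failed
```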

requirements.txt Normal file

@@ -0,0 +1,2 @@
dbt-snowflake>=1.7,<1.8
protobuf==4.25.3

selectors.yml Normal file

@@ -0,0 +1,7 @@
selectors:
- name: integration_tests
description: "Selector for integration tests"
definition:
union:
- method: fqn
value: "livequery_models.deploy.core._utils"

snapshots/.gitkeep Normal file