Merge pull request #1 from FlipsideCrypto/PR1---bronze-and-silver

PR1 bronze and silver & everything else
This commit is contained in:
eric-laurello 2025-03-06 11:29:14 -05:00 committed by GitHub
commit 76a25bd707
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
271 changed files with 12111 additions and 0 deletions

.github/workflows/dbt_docs_update.yml (new file, 71 lines)

@ -0,0 +1,71 @@
name: docs_update
on:
push:
branches:
- "main"
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: checkout docs branch
run: |
git checkout -B docs origin/main
- name: generate dbt docs
run: |
dbt ls -t prod
dbt docs generate --no-compile -t prod
- name: move files to docs directory
run: |
mkdir -p ./docs
cp target/{catalog.json,manifest.json,index.html} docs/
- name: clean up target directory
run: dbt clean
- name: check for changes
run: git status
- name: stage changed files
run: git add .
- name: commit changed files
run: |
git config user.email "abc@xyz"
git config user.name "github-actions"
git commit -am "Auto-update docs"
- name: push changes to docs
run: |
git push -f --set-upstream origin docs

.github/workflows/dbt_run_adhoc.yml (new file, 67 lines)

@ -0,0 +1,67 @@
name: dbt_run_adhoc
run-name: dbt_run_adhoc
on:
workflow_dispatch:
branches:
- "main"
inputs:
environment:
type: choice
description: DBT Run Environment
required: true
options:
- dev
- prod
default: dev
warehouse:
type: choice
description: Snowflake warehouse
required: true
options:
- DBT
- DBT_CLOUD
- DBT_EMERGENCY
default: DBT
dbt_command:
type: string
description: 'DBT Run Command'
required: true
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
${{ inputs.dbt_command }}

@ -0,0 +1,44 @@
name: dbt_run_dev_refresh
run-name: dbt_run_dev_refresh
on:
workflow_dispatch:
# schedule:
# - cron: '27 8 * * *'
env:
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run-operation run_sp_create_prod_clone

@ -0,0 +1,46 @@
name: dbt_run_incremental_core
run-name: dbt_run_incremental_core
on:
workflow_dispatch:
schedule:
- cron: '*/95 * * * *'
env:
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "${{ vars.PYTHON_VERSION }}"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -s "maya_models,tag:scheduled_core"

.github/workflows/dbt_test.yml (new file, 42 lines)

@ -0,0 +1,42 @@
name: dbt_test_scheduled
run-name: dbt_test_scheduled
on:
workflow_dispatch:
schedule:
- cron: '0 1 * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ secrets.ACCOUNT }}"
ROLE: "${{ secrets.ROLE }}"
USER: "${{ secrets.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ secrets.REGION }}"
DATABASE: "${{ secrets.DATABASE }}"
WAREHOUSE: "${{ secrets.WAREHOUSE }}"
SCHEMA: "${{ secrets.SCHEMA }}"
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10.x"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test -m "maya_models,models/bronze" "maya_models,models/silver" "maya_models,models/gold"

.gitignore (new file, 20 lines)

@ -0,0 +1,20 @@
target/
dbt_modules/
# newer versions of dbt use this directory instead of dbt_modules for installed packages
dbt_packages/
logs/
.venv/
.python-version
dbt-env/
venv/
# Visual Studio Code files
*/.vscode
*.code-workspace
.history/
**/.DS_Store
.vscode/
.env
.DS_Store
.user.yml

README.md (new file, 74 lines)

@ -0,0 +1,74 @@
## Profile Set Up
#### Use the following within profiles.yml
----
```yml
maya:
target: dev
outputs:
dev:
type: snowflake
account: <ACCOUNT>
role: <ROLE>
user: <USERNAME>
password: <PASSWORD>
region: <REGION>
database: maya_DEV
warehouse: <WAREHOUSE>
schema: silver
threads: 4
client_session_keep_alive: False
query_tag: <TAG>
```
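A quick way to confirm the profile resolves and the connection works is `dbt debug` (a standard dbt command, shown here as a generic sketch):
```shell
# verify profiles.yml parses and the dev target can connect to Snowflake
dbt debug --target dev
```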
### Resources:
- Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction)
- Check out [Discourse](https://discourse.getdbt.com/) for commonly asked questions and answers
- Join the [chat](https://community.getdbt.com/) on Slack for live discussions and support
- Find [dbt events](https://events.getdbt.com) near you
- Check out [the blog](https://blog.getdbt.com/) for the latest news on dbt's development and best practices
## Applying Model Tags
### Database / Schema level tags
Database and schema tags are applied via the `add_database_or_schema_tags` macro. These tags are inherited by their downstream objects. To add or modify tags, call the appropriate tag-set function within the macro.
```jinja
{{ set_database_tag_value('SOME_DATABASE_TAG_KEY','SOME_DATABASE_TAG_VALUE') }}
{{ set_schema_tag_value('SOME_SCHEMA_TAG_KEY','SOME_SCHEMA_TAG_VALUE') }}
```
### Model tags
To add or update a model's Snowflake tags, add or modify the `meta` model property under `config`. Only table-level tags are supported via dbt at this time.
```jinja
{{ config(
...,
meta={
'database_tags':{
'table': {
'PURPOSE': 'SOME_PURPOSE'
}
}
},
...
) }}
```
By default, model tags are not pushed to Snowflake on each load. You can push a tag update for a model by specifying the `UPDATE_SNOWFLAKE_TAGS` project variable during a run.
```shell
dbt run --var '{"UPDATE_SNOWFLAKE_TAGS":True}' -s models/core/core__fact_swaps.sql
```
### Querying for existing tags on a model in Snowflake
```sql
select *
from table(maya.information_schema.tag_references('maya.core.fact_blocks', 'table'));
```
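Column-level tags, which the tagging macros also manage, can be inspected the same way with Snowflake's `tag_references_all_columns` information schema function (table name reused from the example above):
```sql
select *
from table(maya.information_schema.tag_references_all_columns('maya.core.fact_blocks', 'table'));
```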

dbt_project.yml (new file, 73 lines)

@ -0,0 +1,73 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "maya_models"
version: "1.0.0"
config-version: 2
require-dbt-version: ">=1.8.0"
# This setting configures which "profile" dbt uses for this project.
profile: "maya"
# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
model-paths: ["models"]
analysis-paths: ["analysis"]
test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]
target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
- "target"
- "dbt_modules"
- "dbt_packages"
# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models
# In this example config, we tell dbt to build all models in the example/ directory
# as tables. These settings can be overridden in the individual model files
# using the `{{ config(...) }}` macro.
models:
+copy_grants: true
+persist_docs:
relation: true
columns: true
+on_schema_change: "append_new_columns"
livequery_models:
deploy:
core:
materialized: ephemeral
data_tests:
maya_models: # replace with the name of the chain
+store_failures: true # all tests
on-run-start:
- '{{create_sps()}}'
- '{{create_udfs()}}'
dispatch:
- macro_namespace: dbt
search_order:
- maya_models
- dbt_snowflake_query_tags
- dbt
query-comment:
comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
append: true # Snowflake removes prefixed comments.
vars:
"dbt_date:time_zone": GMT
OBSERV_FULL_TEST: False
START_GHA_TASKS: False
UPDATE_SNOWFLAKE_TAGS: True
UPDATE_UDFS_AND_SPS: True

docs/.gitkeep (new file, empty)

macros/.gitkeep (new file, empty)

macros/create_sps.sql (new file, 7 lines)

@ -0,0 +1,7 @@
{% macro create_sps() %}
{% if target.database == 'MAYA' %}
CREATE schema IF NOT EXISTS _internal;
{{ sp_create_prod_clone('_internal') }};
{% endif %}
{% endmacro %}

macros/create_udfs.sql (new file, 2 lines)

@ -0,0 +1,2 @@
{% macro create_udfs() %}
{% endmacro %}

@ -0,0 +1,33 @@
{% macro generate_schema_name(
custom_schema_name = none,
node = none
) -%}
{% set node_name = node.name %}
{% set split_name = node_name.split('__') %}
{{ split_name[0] | trim }}
{%- endmacro %}
{% macro generate_alias_name(
custom_alias_name = none,
node = none
) -%}
{% set node_name = node.name %}
{% set split_name = node_name.split('__') %}
{% if split_name | length < 2 %}
{{ split_name[0] | trim }}
{% else %}
{{ split_name[1] | trim }}
{% endif %}
{%- endmacro %}
{% macro generate_tmp_view_name(model_name) -%}
{% set node_name = model_name.name %}
{% set split_name = node_name.split('__') %}
{{ (target.database ~ '.' ~ split_name[0] ~ '.' ~ split_name[1] ~ '__dbt_tmp') | trim }}
{%- endmacro %}
{% macro generate_view_name(model_name) -%}
{% set node_name = model_name.name %}
{% set split_name = node_name.split('__') %}
{{ (target.database ~ '.' ~ split_name[0] ~ '.' ~ split_name[1]) | trim }}
{%- endmacro %}
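Taken together, these overrides implement the project's double-underscore naming convention; a sketch of the mapping (model name hypothetical):
```sql
-- hypothetical model file: silver__transfer_events.sql
-- generate_schema_name(...) -> schema 'silver'
-- generate_alias_name(...)  -> alias 'transfer_events'
-- generate_view_name(...)   -> '<target.database>.silver.transfer_events'
```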

@ -0,0 +1,11 @@
{% macro set_query_tag() -%}
{% set new_json = {"repo":project_name, "object":this.table, "profile":target.profile_name, "env":target.name, "existing_tag":get_current_query_tag() } %}
{% set new_query_tag = tojson(new_json) | as_text %}
{% if new_query_tag %}
{% set original_query_tag = get_current_query_tag() %}
{{ log("Setting query_tag to '" ~ new_query_tag ~ "'. Will reset to '" ~ original_query_tag ~ "' after materialization.") }}
{% do run_query("alter session set query_tag = '{}'".format(new_query_tag)) %}
{{ return(original_query_tag)}}
{% endif %}
{{ return(none)}}
{% endmacro %}
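For reference, the session tag this macro sets is a single JSON document; a sketch of the resulting statement (field values hypothetical):
```sql
-- shape of the statement executed above (values hypothetical)
alter session set query_tag = '{"repo": "maya_models", "object": "fact_swaps", "profile": "maya", "env": "dev", "existing_tag": ""}';
```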

macros/dbt/get_merge.sql (new file, 30 lines)

@ -0,0 +1,30 @@
-- incremental_strategy="merge"
{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}
{% set merge_sql = fsc_utils.get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) %}
{{ return(merge_sql) }}
{% endmacro %}
-- incremental_strategy="delete+insert"
{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}
{% set predicate_override = "" %}
-- get the min value of column
{% if incremental_predicates[0] == "min_value_predicate" %}
{% set min_column_name = incremental_predicates[1] %}
{% set query %}
select min({{ min_column_name }}) from {{ source }}
{% endset %}
{% set min_block = run_query(query).columns[0][0] %}
{% if min_block is not none %}
{% set predicate_override %}
round({{ target }}.{{ min_column_name }},-5) >= round({{ min_block }},-5)
{% endset %}
{% else %}
{% set predicate_override = "1=1" %}
{% endif %}
{% endif %}
{% set predicates = [predicate_override] + incremental_predicates[2:] if predicate_override else incremental_predicates %}
-- standard delete+insert from here
{% set merge_sql = dbt.get_delete_insert_merge_sql(target, source, unique_key, dest_columns, predicates) %}
{{ return(merge_sql) }}
{% endmacro %}
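A model opts into the minimum-value behavior by passing the sentinel string as its first incremental predicate, followed by the column to bound on; a hedged config sketch (model settings hypothetical):
```sql
-- hypothetical model config exercising the min_value_predicate branch above
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'block_id',
    incremental_predicates = ['min_value_predicate', 'block_id']
) }}
```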

@ -0,0 +1,4 @@
{% macro dbt_snowflake_get_tmp_relation_type(strategy, unique_key, language) %}
-- always table
{{ return('table') }}
{% endmacro %}

@ -0,0 +1,10 @@
{% macro run_sp_create_prod_clone() %}
{% set clone_query %}
call maya._internal.create_prod_clone(
'maya',
'maya_dev',
'internal_dev'
);
{% endset %}
{% do run_query(clone_query) %}
{% endmacro %}

@ -0,0 +1,49 @@
{% macro sp_create_prod_clone(target_schema) -%}
create or replace procedure {{ target_schema }}.create_prod_clone(source_db_name string, destination_db_name string, role_name string)
returns boolean
language javascript
execute as caller
as
$$
snowflake.execute({sqlText: `BEGIN TRANSACTION;`});
try {
snowflake.execute({sqlText: `CREATE OR REPLACE DATABASE ${DESTINATION_DB_NAME} CLONE ${SOURCE_DB_NAME}`});
snowflake.execute({sqlText: `DROP SCHEMA IF EXISTS ${DESTINATION_DB_NAME}._INTERNAL`}); /* this only needs to be in prod */
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL SCHEMAS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `REVOKE OWNERSHIP ON FUTURE FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} FROM ROLE DBT_CLOUD_MAYA;`});
snowflake.execute({sqlText: `REVOKE OWNERSHIP ON FUTURE PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} FROM ROLE DBT_CLOUD_MAYA;`});
snowflake.execute({sqlText: `REVOKE OWNERSHIP ON FUTURE VIEWS IN DATABASE ${DESTINATION_DB_NAME} FROM ROLE DBT_CLOUD_MAYA;`});
snowflake.execute({sqlText: `REVOKE OWNERSHIP ON FUTURE STAGES IN DATABASE ${DESTINATION_DB_NAME} FROM ROLE DBT_CLOUD_MAYA;`});
snowflake.execute({sqlText: `REVOKE OWNERSHIP ON FUTURE TABLES IN DATABASE ${DESTINATION_DB_NAME} FROM ROLE DBT_CLOUD_MAYA;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
var existing_tags = snowflake.execute({sqlText: `SHOW TAGS IN DATABASE ${DESTINATION_DB_NAME};`});
while (existing_tags.next()) {
var schema = existing_tags.getColumnValue(4);
var tag_name = existing_tags.getColumnValue(2);
snowflake.execute({sqlText: `GRANT OWNERSHIP ON TAG ${DESTINATION_DB_NAME}.${schema}.${tag_name} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
}
snowflake.execute({sqlText: `COMMIT;`});
} catch (err) {
snowflake.execute({sqlText: `ROLLBACK;`});
throw(err);
}
return true
$$
{%- endmacro %}
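Once `create_sps` has deployed the procedure, it can also be invoked directly in Snowflake, mirroring the `run_sp_create_prod_clone` operation above:
```sql
-- clone prod into the dev database and hand ownership to the dev role
CALL maya._internal.create_prod_clone('maya', 'maya_dev', 'internal_dev');
```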

@ -0,0 +1,6 @@
{% macro add_database_or_schema_tags() %}
{{ set_database_tag_value(
'BLOCKCHAIN_NAME',
'MAYA'
) }}
{% endmacro %}

@ -0,0 +1,127 @@
{% macro apply_meta_as_tags(results) %}
{% if var("UPDATE_SNOWFLAKE_TAGS") %}
{{ log('apply_meta_as_tags', info=False) }}
{{ log(results, info=False) }}
{% if execute %}
{%- set tags_by_schema = {} -%}
{% for res in results -%}
{% if res.node.meta.database_tags %}
{%- set model_database = res.node.database -%}
{%- set model_schema = res.node.schema -%}
{%- set model_schema_full = model_database+'.'+model_schema -%}
{%- set model_alias = res.node.alias -%}
{% if model_schema_full not in tags_by_schema.keys() %}
{{ log('need to fetch tags for schema '+model_schema_full, info=False) }}
{%- call statement('main', fetch_result=True) -%}
show tags in {{model_database}}.{{model_schema}}
{%- endcall -%}
{%- set _ = tags_by_schema.update({model_schema_full: load_result('main')['table'].columns.get('name').values()|list}) -%}
{{ log('Added tags to cache', info=False) }}
{% else %}
{{ log('already have tag info for schema', info=False) }}
{% endif %}
{%- set current_tags_in_schema = tags_by_schema[model_schema_full] -%}
{{ log('current_tags_in_schema:', info=False) }}
{{ log(current_tags_in_schema, info=False) }}
{{ log("========== Processing tags for "+model_schema_full+"."+model_alias+" ==========", info=False) }}
{% set line -%}
node: {{ res.node.unique_id }}; status: {{ res.status }} (message: {{ res.message }})
node full: {{ res.node}}
meta: {{ res.node.meta}}
materialized: {{ res.node.config.materialized }}
{%- endset %}
{{ log(line, info=False) }}
{%- call statement('main', fetch_result=True) -%}
select LEVEL,UPPER(TAG_NAME) as TAG_NAME,TAG_VALUE from table(information_schema.tag_references_all_columns('{{model_schema}}.{{model_alias}}', 'table'))
{%- endcall -%}
{%- set existing_tags_for_table = load_result('main')['data'] -%}
{{ log('Existing tags for table:', info=False) }}
{{ log(existing_tags_for_table, info=False) }}
{{ log('--', info=False) }}
{% for table_tag in res.node.meta.database_tags.table %}
{{ create_tag_if_missing(current_tags_in_schema,table_tag|upper) }}
{% set desired_tag_value = res.node.meta.database_tags.table[table_tag] %}
{{set_table_tag_value_if_different(model_schema,model_alias,table_tag,desired_tag_value,existing_tags_for_table)}}
{% endfor %}
{{ log("========== Finished processing tags for "+model_alias+" ==========", info=False) }}
{% endif %}
{% endfor %}
{% endif %}
{% endif %}
{% endmacro %}
{% macro create_tag_if_missing(all_tag_names,table_tag) %}
{% if table_tag not in all_tag_names %}
{{ log('Creating missing tag '+table_tag, info=False) }}
{%- call statement('main', fetch_result=True) -%}
create tag if not exists silver.{{table_tag}}
{%- endcall -%}
{{ log(load_result('main').data, info=False) }}
{% else %}
{{ log('Tag already exists: '+table_tag, info=False) }}
{% endif %}
{% endmacro %}
{% macro set_table_tag_value_if_different(model_schema,table_name,tag_name,desired_tag_value,existing_tags) %}
{{ log('Ensuring tag '+tag_name+' has value '+desired_tag_value+' at table level', info=False) }}
{%- set existing_tag_for_table = existing_tags|selectattr('0','equalto','TABLE')|selectattr('1','equalto',tag_name|upper)|list -%}
{{ log('Filtered tags for table:', info=False) }}
{{ log(existing_tag_for_table[0], info=False) }}
{% if existing_tag_for_table|length > 0 and existing_tag_for_table[0][2]==desired_tag_value %}
{{ log('Correct tag value already exists', info=False) }}
{% else %}
{{ log('Setting tag value for '+tag_name+' to value '+desired_tag_value, info=False) }}
{%- call statement('main', fetch_result=True) -%}
alter table {{model_schema}}.{{table_name}} set tag {{tag_name}} = '{{desired_tag_value}}'
{%- endcall -%}
{{ log(load_result('main').data, info=False) }}
{% endif %}
{% endmacro %}
{% macro set_column_tag_value_if_different(table_name,column_name,tag_name,desired_tag_value,existing_tags) %}
{{ log('Ensuring tag '+tag_name+' has value '+desired_tag_value+' at column level', info=False) }}
{%- set existing_tag_for_column = existing_tags|selectattr('0','equalto','COLUMN')|selectattr('1','equalto',tag_name|upper)|list -%}
{{ log('Filtered tags for column:', info=False) }}
{{ log(existing_tag_for_column[0], info=False) }}
{% if existing_tag_for_column|length > 0 and existing_tag_for_column[0][2]==desired_tag_value %}
{{ log('Correct tag value already exists', info=False) }}
{% else %}
{{ log('Setting tag value for '+tag_name+' to value '+desired_tag_value, info=False) }}
{%- call statement('main', fetch_result=True) -%}
alter table {{table_name}} modify column {{column_name}} set tag {{tag_name}} = '{{desired_tag_value}}'
{%- endcall -%}
{{ log(load_result('main').data, info=False) }}
{% endif %}
{% endmacro %}
{% macro set_database_tag_value(tag_name,tag_value) %}
{% set query %}
create tag if not exists silver.{{tag_name}}
{% endset %}
{% do run_query(query) %}
{% set query %}
alter database {{target.database}} set tag {{target.database}}.silver.{{tag_name}} = '{{tag_value}}'
{% endset %}
{% do run_query(query) %}
{% endmacro %}
{% macro set_schema_tag_value(target_schema,tag_name,tag_value) %}
{% set query %}
create tag if not exists silver.{{tag_name}}
{% endset %}
{% do run_query(query) %}
{% set query %}
alter schema {{target.database}}.{{target_schema}} set tag {{target.database}}.silver.{{tag_name}} = '{{tag_value}}'
{% endset %}
{% do run_query(query) %}
{% endmacro %}
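`apply_meta_as_tags` consumes dbt's `results` object, so it is typically wired up through an `on-run-end` hook; a minimal sketch (this project's dbt_project.yml may wire it differently):
```yml
# hypothetical dbt_project.yml hook; `results` is dbt's on-run-end context variable
on-run-end:
  - '{{ apply_meta_as_tags(results) }}'
```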

@ -0,0 +1,29 @@
{% test compare_model_subset(model, compare_model, compare_columns, model_condition) %}
{% set compare_cols_csv = compare_columns | join(', ') %}
with a as (
select {{compare_cols_csv}} from {{ model }}
{{ model_condition }}
),
b as (
select {{compare_cols_csv}} from {{ compare_model }}
),
a_minus_b as (
select * from a
except
select * from b
),
b_minus_a as (
select * from b
except
select * from a
),
unioned as (
select 'in_actual_not_in_expected' as which_diff, a_minus_b.* from a_minus_b
union all
select 'in_expected_not_in_actual' as which_diff, b_minus_a.* from b_minus_a
)
select * from unioned
{% endtest %}
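As a usage sketch, the generic test attaches to a model in a schema file (model, columns, and condition below are hypothetical):
```yml
# hypothetical schema.yml entry exercising compare_model_subset
models:
  - name: core__fact_swaps
    tests:
      - compare_model_subset:
          compare_model: ref('silver__swap_events')
          compare_columns: ['tx_id', 'block_timestamp']
          model_condition: "where block_timestamp::date < current_date"
```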

@ -0,0 +1,58 @@
{% macro date_gaps(
table,
partition_by,
column
) %}
{%- set partition_sql = partition_by | join(", ") -%}
{%- set previous_column = "prev_" ~ column -%}
WITH source AS (
SELECT
{{ partition_sql + "," if partition_sql }}
{{ column }},
LAG(
{{ column }},
1
) over (
{{ "PARTITION BY " ~ partition_sql if partition_sql }}
ORDER BY
{{ column }} ASC
) AS {{ previous_column }}
FROM
{{ table }}
)
SELECT
{{ partition_sql + "," if partition_sql }}
{{ previous_column }},
{{ column }},
DATEDIFF(
days,
{{ previous_column }},
{{ column }}
) - 1 AS gap
FROM
source
{% if varargs -%}
LEFT JOIN (
{% for x in varargs %}
(
{{ dbt_utils.date_spine(
datepart = "day",
start_date = x.start_date,
end_date = x.end_date
) }}
)
{{- "UNION ALL" if not loop.last -}}
{% endfor %}
) exclude
ON source.day = exclude.date_day
{%- endif %}
WHERE
DATEDIFF(
days,
{{ previous_column }},
{{ column }}
) > 1
{{ "AND source.day != exclude.date_day" if varargs }}
ORDER BY
gap DESC
{% endmacro %}
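The trailing varargs are consumed as exclusion windows, each unioned into a date spine and anti-joined against the gap rows; a hedged call sketch (table and dates hypothetical — note the exclusion join assumes the checked column is named `day`):
```sql
-- hypothetical singular test: flag missing days, ignoring a known outage week
{{ date_gaps(
    ref('defi__fact_daily_tvl'),
    [],
    'day',
    { 'start_date': "'2023-01-01'::date", 'end_date': "'2023-01-07'::date" }
) }}
```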

@ -0,0 +1,10 @@
{% test negative_one(
model,
column_name
) %}
SELECT
*
FROM
{{ model }}
WHERE
{{ column_name }} = '-1'
{% endtest %}

@ -0,0 +1,34 @@
{% macro sequence_distinct_gaps(
table,
column
) %}
{#- no partition support here: the DISTINCT subquery below only exposes the target column -#}
{%- set previous_column = "prev_" ~ column -%}
WITH source AS (
SELECT
{{ column }},
LAG(
{{ column }},
1
) over (
ORDER BY
{{ column }} ASC
) AS {{ previous_column }}
FROM (
SELECT DISTINCT {{ column }} FROM {{ table }}
)
)
SELECT
{{ previous_column }},
{{ column }},
{{ column }} - {{ previous_column }}
- 1 AS gap
FROM
source
WHERE
{{ column }} - {{ previous_column }} <> 1
ORDER BY
gap DESC
{% endmacro %}

@ -0,0 +1,43 @@
{% macro sequence_distinct_gaps_dim_block_id(
table,
column
) %}
{#- no partition support here: the DISTINCT subquery below only exposes the target column -#}
{%- set previous_column = "prev_" ~ column -%}
WITH source AS (
SELECT
{{ column }},
LAG(
{{ column }},
1
) over (
ORDER BY
{{ column }} ASC
) AS {{ previous_column }}
FROM
(
SELECT
DISTINCT {{ column }}
FROM
{{ table }} A
JOIN {{ ref('core__dim_block') }}
b
ON A.dim_block_id = b.dim_block_id
WHERE
A.dim_block_id <> '-1'
AND b.block_timestamp :: DATE < CURRENT_DATE
)
)
SELECT
{{ previous_column }},
{{ column }},
{{ column }} - {{ previous_column }}
- 1 AS gap
FROM
source
WHERE
{{ column }} - {{ previous_column }} <> 1
ORDER BY
gap DESC
{% endmacro %}

@ -0,0 +1,37 @@
{% macro sequence_gaps(
table,
partition_by,
column
) %}
{%- set partition_sql = partition_by | join(", ") -%}
{%- set previous_column = "prev_" ~ column -%}
WITH source AS (
SELECT
{{ partition_sql + "," if partition_sql }}
{{ column }},
LAG(
{{ column }},
1
) over (
{{ "PARTITION BY " ~ partition_sql if partition_sql }}
ORDER BY
{{ column }} ASC
) AS {{ previous_column }}
FROM
{{ table }}
WHERE
block_timestamp::date <= current_date - 1
)
SELECT
{{ partition_sql + "," if partition_sql }}
{{ previous_column }},
{{ column }},
{{ column }} - {{ previous_column }}
- 1 AS gap
FROM
source
WHERE
{{ column }} - {{ previous_column }} <> 1
ORDER BY
gap DESC
{% endmacro %}

@ -0,0 +1,52 @@
{% macro sequence_gaps_buffered_look_back(
table,
partition_by,
column,
delayed_column,
delayed_period
) %}
{%- set partition_sql = partition_by | join(", ") -%}
{%- set previous_column = "prev_" ~ column -%}
WITH source AS (
SELECT
{{ partition_sql + "," if partition_sql }}
{{ column }},
LAG(
{{ column }},
1
) over (
{{ "PARTITION BY " ~ partition_sql if partition_sql }}
ORDER BY
{{ column }} ASC
) AS {{ previous_column }},
LAG(
{{ delayed_column }},
1
) over (
{{ "PARTITION BY " ~ partition_sql if partition_sql }}
ORDER BY
{{ column }} ASC
) AS {{ delayed_column }}
FROM
{{ table }}
)
SELECT
{{ partition_sql + "," if partition_sql }}
{{ previous_column }},
{{ column }},
{{ column }} - {{ previous_column }}
- 1 AS gap
FROM
source
WHERE
{{ column }} - {{ previous_column }} <> 1
AND
{{ delayed_column }} < (
SELECT
MAX(
{{ delayed_column }}
)
FROM
{{ this }}
) - INTERVAL '{{ delayed_period }}'
{% endmacro %}

@ -0,0 +1,17 @@
{{ config(
materialized = 'view'
) }}
SELECT
add_asgard_addr,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_active_vault_events'
) }}

@ -0,0 +1,25 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx,
chain,
from_addr,
to_addr,
asset,
asset_e8,
memo,
cacao_e8,
pool,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_add_events'
) }}

@ -0,0 +1,18 @@
{{ config(
materialized = 'view'
) }}
SELECT
HASH,
height,
TIMESTAMP,
agg_state,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT
FROM
{{ source(
'maya_midgard',
'midgard_block_log'
) }}

@ -0,0 +1,19 @@
{{ config(
materialized = 'view'
) }}
SELECT
pool,
asset_e8,
cacao_e8,
synth_e8,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT
FROM
{{ source(
'maya_midgard',
'midgard_block_pool_depths'
) }}

@ -0,0 +1,25 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx,
chain,
from_addr,
to_addr,
asset,
asset_e8,
memo,
bond_type,
e8,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT
FROM
{{ source(
'maya_midgard',
'midgard_bond_events'
) }}

@ -0,0 +1,16 @@
{{ config(
materialized = 'view'
) }}
SELECT
cacao_price_e8,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_cacao_price'
) }}

@ -0,0 +1,16 @@
{{ config(
materialized = 'view'
) }}
SELECT
key,
VALUE,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_constants'
) }}

@ -0,0 +1,23 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx_id,
code,
memo,
asset,
amount_e8,
from_addr,
reason,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_failed_deposit_messages'
) }}

@ -0,0 +1,20 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx,
asset,
asset_e8,
pool_deduct,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_fee_events'
) }}

@ -0,0 +1,20 @@
{{ config(
materialized = 'view'
) }}
SELECT
asset,
asset_e8,
cacao_e8,
tx_count,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_gas_events'
) }}

@ -0,0 +1,17 @@
{{ config(
materialized = 'view'
) }}
SELECT
add_asgard_addr,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_inactive_vault_events'
) }}

@ -0,0 +1,30 @@
{{ config(
materialized = 'view'
) }}
SELECT
NAME,
chain,
address,
registration_fee_e8,
fund_amount_e8,
height,
expire,
owner,
tx_id,
memo,
sender,
preferred_asset,
affiliate_bps,
sub_affiliates,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_mayaname_change_events'
) }}

@ -0,0 +1,17 @@
{{ config(
materialized = 'view'
) }}
SELECT
node_addr,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_new_node_events'
) }}

@ -0,0 +1,25 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx,
chain,
from_addr,
to_addr,
asset,
asset_e8,
memo,
in_tx,
internal,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_outbound_events'
) }}

@ -0,0 +1,25 @@
{{ config(
materialized = 'view'
) }}
SELECT
pool,
asset_tx,
asset_chain,
asset_addr,
asset_e8,
cacao_tx,
cacao_addr,
cacao_e8,
pending_type,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_pending_liquidity_events'
) }}

@ -0,0 +1,22 @@
{{ config(
materialized = 'view'
) }}
SELECT
asset,
cacao_amt,
cacao_add,
asset_amt,
asset_add,
reason,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_pool_balance_change_events'
) }}

@ -0,0 +1,18 @@
{{ config(
materialized = 'view'
) }}
SELECT
asset,
status,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_pool_events'
) }}

@ -0,0 +1,27 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx,
chain,
from_addr,
to_addr,
asset,
asset_e8,
asset_2nd,
asset_2nd_e8,
memo,
code,
reason,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_refund_events'
) }}

@ -0,0 +1,25 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx,
chain,
from_addr,
to_addr,
asset,
asset_e8,
memo,
addr,
e8,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_reserve_events'
) }}

@ -0,0 +1,19 @@
{{ config(
materialized = 'view'
) }}
SELECT
pool,
cacao_e8,
saver_e8,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_rewards_event_entries'
) }}

@ -0,0 +1,17 @@
{{ config(
materialized = 'view'
) }}
SELECT
bond_e8,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_rewards_events'
) }}

@ -0,0 +1,24 @@
{{ config(
materialized = 'view'
) }}
SELECT
amount_e8,
asset,
from_addr,
to_addr,
memo,
tx_id,
raw_log,
code,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_send_messages'
) }}

@ -0,0 +1,18 @@
{{ config(
materialized = 'view'
) }}
SELECT
node_addr,
ip_addr,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_set_ip_address_events'
) }}

@ -0,0 +1,18 @@
{{ config(
materialized = 'view'
) }}
SELECT
key,
VALUE,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_set_mimir_events'
) }}

@ -0,0 +1,20 @@
{{ config(
materialized = 'view'
) }}
SELECT
node_addr,
secp256k1,
ed25519,
validator_consensus,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_set_node_keys_events'
) }}

@ -0,0 +1,19 @@
{{ config(
materialized = 'view'
) }}
SELECT
address,
key,
VALUE,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_set_node_mimir_events'
) }}

@ -0,0 +1,18 @@
{{ config(
materialized = 'view'
) }}
SELECT
node_addr,
version,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_set_version_events'
) }}

@ -0,0 +1,19 @@
{{ config(
materialized = 'view'
) }}
SELECT
pool,
asset,
asset_e8,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_slash_events'
) }}

@ -0,0 +1,22 @@
{{ config(
materialized = 'view'
) }}
SELECT
bond_address,
lp_address,
asset,
lp_units,
asset_e8_loss,
cacao_e10_loss,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_slash_liquidity_events'
) }}

@ -0,0 +1,19 @@
{{ config(
materialized = 'view'
) }}
SELECT
node_address,
slash_points,
reason,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_slash_points_events'
) }}

@ -0,0 +1,27 @@
{{ config(
materialized = 'view'
) }}
SELECT
pool,
asset_tx,
asset_chain,
asset_addr,
asset_e8,
stake_units,
cacao_tx,
cacao_addr,
cacao_e8,
_ASSET_IN_CACAO_E8,
memo,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_stake_events'
) }}

@ -0,0 +1,29 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx_id,
INTERVAL,
quantity,
COUNT,
last_height,
deposit_asset,
deposit_e8,
in_asset,
in_e8,
out_asset,
out_e8,
failed_swaps,
failed_swap_reasons,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_streaming_swap_details_events'
) }}

@ -0,0 +1,34 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx,
chain,
from_addr,
to_addr,
from_asset,
from_e8,
to_asset,
to_e8,
memo,
pool,
to_e8_min,
swap_slip_bp,
liq_fee_e8,
liq_fee_in_cacao_e8,
_DIRECTION,
_STREAMING,
streaming_count,
streaming_quantity,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_swap_events'
) }}

@ -0,0 +1,20 @@
{{ config(
materialized = 'view'
) }}
SELECT
from_addr,
to_addr,
asset,
amount_e8,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_transfer_events'
) }}

@ -0,0 +1,19 @@
{{ config(
materialized = 'view'
) }}
SELECT
node_addr,
former,
"CURRENT",
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_update_node_account_status_events'
) }}

@ -0,0 +1,19 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx,
from_addr,
node_addr,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_validator_request_leave_events'
) }}

@ -0,0 +1,31 @@
{{ config(
materialized = 'view'
) }}
SELECT
tx,
chain,
from_addr,
to_addr,
asset,
asset_e8,
emit_asset_e8,
emit_cacao_e8,
memo,
pool,
stake_units,
basis_points,
asymmetry,
imp_loss_protection_e8,
_EMIT_ASSET_IN_CACAO_E8,
event_id,
block_timestamp,
__HEVO__DATABASE_NAME,
__HEVO__SCHEMA_NAME,
__HEVO__INGESTED_AT,
__HEVO__LOADED_AT,
FROM
{{ source(
'maya_midgard',
'midgard_withdraw_events'
) }}

@ -0,0 +1,123 @@
{% docs __overview__ %}
# Welcome to the Flipside Crypto Maya Models Documentation
## **What does this documentation cover?**
The documentation included here details the design of the Maya
tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/insights/dashboards/maya) For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/maya-models)
## **How do I use these docs?**
The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information.
If you are experienced with dbt docs, feel free to use the sidebar to navigate the documentation, as well as explore the relationships between tables and the logic building them.
There is more information on how to use dbt docs in the last section of this document.
## **Quick Links to Table Documentation**
**Click on the links below to jump to the documentation for each schema.**
### Core Schema (`maya`.`CORE`.`<table_name>`)
- [core.dim_block](#!/model/model.maya_models.core__dim_block)
- [core.dim_midgard](#!/model/model.maya_models.core__dim_midgard)
- [core.fact_mayaname_change_events](#!/model/model.maya_models.core__fact_mayaname_change_events)
- [core.fact_set_mimir_events](#!/model/model.maya_models.core__fact_set_mimir_events)
- [core.fact_transfer_events](#!/model/model.maya_models.core__fact_transfer_events)
- [core.fact_transfers](#!/model/model.maya_models.core__fact_transfers)
### Defi Schema
- [defi.fact_active_vault_events](#!/model/model.maya_models.defi__fact_active_vault_events)
- [defi.fact_add_events](#!/model/model.maya_models.defi__fact_add_events)
- [defi.fact_block_pool_depths](#!/model/model.maya_models.defi__fact_block_pool_depths)
- [defi.fact_block_rewards](#!/model/model.maya_models.defi__fact_block_rewards)
- [defi.fact_bond_actions](#!/model/model.maya_models.defi__fact_bond_actions)
- [defi.fact_bond_events](#!/model/model.maya_models.defi__fact_bond_events)
- [defi.fact_daily_earnings](#!/model/model.maya_models.defi__fact_daily_earnings)
- [defi.fact_daily_pool_stats](#!/model/model.maya_models.defi__fact_daily_pool_stats)
- [defi.fact_daily_tvl](#!/model/model.maya_models.defi__fact_daily_tvl)
- [defi.fact_failed_deposit_messages](#!/model/model.maya_models.defi__fact_failed_deposit_messages)
- [defi.fact_fee_events](#!/model/model.maya_models.defi__fact_fee_events)
- [defi.fact_gas_events](#!/model/model.maya_models.defi__fact_gas_events)
- [defi.fact_inactive_vault_events](#!/model/model.maya_models.defi__fact_inactive_vault_events)
- [defi.fact_liquidity_actions](#!/model/model.maya_models.defi__fact_liquidity_actions)
- [defi.fact_outbound_events](#!/model/model.maya_models.defi__fact_outbound_events)
- [defi.fact_pending_liquidity_events](#!/model/model.maya_models.defi__fact_pending_liquidity_events)
- [defi.fact_pool_balance_change_events](#!/model/model.maya_models.defi__fact_pool_balance_change_events)
- [defi.fact_pool_block_balances](#!/model/model.maya_models.defi__fact_pool_block_balances)
- [defi.fact_pool_block_fees](#!/model/model.maya_models.defi__fact_pool_block_fees)
- [defi.fact_pool_block_statistics](#!/model/model.maya_models.defi__fact_pool_block_statistics)
- [defi.fact_pool_events](#!/model/model.maya_models.defi__fact_pool_events)
- [defi.fact_refund_events](#!/model/model.maya_models.defi__fact_refund_events)
- [defi.fact_reserve_events](#!/model/model.maya_models.defi__fact_reserve_events)
- [defi.fact_rewards_event_entries](#!/model/model.maya_models.defi__fact_rewards_event_entries)
- [defi.fact_rewards_events](#!/model/model.maya_models.defi__fact_rewards_events)
- [defi.fact_send_message_events](#!/model/model.maya_models.defi__fact_send_message_events)
- [defi.fact_slash_liquidity_events](#!/model/model.maya_models.defi__fact_slash_liquidity_events)
- [defi.fact_stake_events](#!/model/model.maya_models.defi__fact_stake_events)
- [defi.fact_streamling_swap_details_events](#!/model/model.maya_models.defi__fact_streamling_swap_details_events)
- [defi.fact_swaps](#!/model/model.maya_models.defi__fact_swaps)
- [defi.fact_swaps_events](#!/model/model.maya_models.defi__fact_swaps_events)
- [defi.fact_total_block_rewards](#!/model/model.maya_models.defi__fact_total_block_rewards)
- [defi.fact_total_value_locked](#!/model/model.maya_models.defi__fact_total_value_locked)
- [defi.fact_update_node_account_status_events](#!/model/model.maya_models.defi__fact_update_node_account_status_events)
- [defi.fact_withdraw_events](#!/model/model.maya_models.defi__fact_withdraw_events)
### Governance Schema
- [gov.fact_new_node_events](#!/model/model.maya_models.gov__fact_new_node_events)
- [gov.fact_set_ip_address_events](#!/model/model.maya_models.gov__fact_set_ip_address_events)
- [gov.fact_set_node_keys_events](#!/model/model.maya_models.gov__fact_set_node_keys_events)
- [gov.fact_set_version_events](#!/model/model.maya_models.gov__fact_set_version_events)
- [gov.fact_slash_amounts](#!/model/model.maya_models.gov__fact_slash_amounts)
- [gov.fact_slash_points](#!/model/model.maya_models.gov__fact_slash_points)
- [gov.fact_validator_request_leave_events](#!/model/model.maya_models.gov__fact_validator_request_leave_events)
### Price Schema
- [price.fact_cacao_price](#!/model/model.maya_models.price__fact_cacao_price)
## **Data Model Overview**
While Maya models are built in a few different ways, they are organized into three layers of SQL models: **bronze, silver, and gold (or core).**
- Bronze: Data is loaded in from the source as a view
- Silver: All necessary parsing, filtering, de-duping, and other transformations are done here
- Gold (or core): Final views and tables that are available publicly
The dimension tables are sourced from a variety of on-chain and off-chain sources.
Convenience views (denoted ez\_) are a combination of different fact and dimension tables. These views are built to make it easier to query the data.
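For illustration, a bronze model in this project is typically just a thin view over the raw Midgard source; the sketch below mirrors the pattern used throughout `models/bronze` (column list abbreviated):
```sql
{{ config(
    materialized = 'view'
) }}

SELECT
    tx,
    asset,
    asset_e8,
    event_id,
    block_timestamp
FROM
    {{ source(
        'maya_midgard',
        'midgard_fee_events'
    ) }}
```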
## **Using dbt docs**
### Navigation
You can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models in the project.
### Database Tab
This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown in this interface, as they do not exist in the database.
### Graph Exploration
You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.
On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the Expand button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, or are built from, the model you're exploring.
Once expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).
Note that you can also right-click on models to interactively filter and explore the graph.
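A couple of concrete selection examples (current dbt spells `--models` as `--select`; the model name and tag below come from this project):
```shell
# list one model plus everything upstream and downstream of it
dbt ls --select +core__dim_block+

# run the scheduled core models while excluding the bronze views
dbt run --select tag:scheduled_core --exclude models/bronze
```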
### **More information**
- [maya on Flipside Crypto](https://flipsidecrypto.xyz/insights/dashboards/maya)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/maya-models)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)
{% enddocs %}

@ -0,0 +1,5 @@
{% docs address %}
The account public key
{% enddocs %}

@ -0,0 +1,5 @@
{% docs asset %}
Asset name or pool name
{% enddocs %}

@ -0,0 +1,5 @@
{% docs block_date %}
Date of block minting (without a timezone)
{% enddocs %}

@ -0,0 +1,5 @@
{% docs block_id %}
ID of the confirmed block
{% enddocs %}

@ -0,0 +1,5 @@
{% docs block_timestamp %}
Timestamp of block minting (without a timezone)
{% enddocs %}

@ -0,0 +1,5 @@
{% docs blockchain %}
The name of the blockchain
{% enddocs %}

@ -0,0 +1,5 @@
{% docs bond_type %}
There are 4 different types of bond: bond_reward, bond_paid, bond_cost, bond_returned
{% enddocs %}

@ -0,0 +1,5 @@
{% docs from_address %}
The sending address for this event
{% enddocs %}

@ -0,0 +1,5 @@
{% docs id %}
The primary key for the table.
{% enddocs %}

@ -0,0 +1,5 @@
{% docs inserted_timestamp %}
The utc timestamp at which the row was inserted into the table.
{% enddocs %}

@ -0,0 +1,5 @@
{% docs memo %}
The memo for this event
{% enddocs %}

@ -0,0 +1,5 @@
{% docs modified_timestamp %}
The utc timestamp at which the row was last modified.
{% enddocs %}

@ -0,0 +1,5 @@
{% docs pk %}
The unique identifier for each row in the table.
{% enddocs %}

@ -0,0 +1,5 @@
{% docs pool_name %}
Name of the pool; also the asset name in other tables
{% enddocs %}

@ -0,0 +1,137 @@
{% docs prices_dim_asset_metadata_table_doc %}
A comprehensive dimensional table holding asset metadata and other relevant details pertaining to each id, from multiple providers. This data set includes raw, non-transformed data coming directly from the provider APIs and rows are not intended to be unique. As a result, there may be data quality issues persisting in the APIs that flow through to this dimensional model. If you are interested in using a curated data set instead, please utilize ez_asset_metadata.
{% enddocs %}
{% docs prices_ez_asset_metadata_table_doc %}
A convenience table holding prioritized asset metadata and other relevant details pertaining to each token_address and native asset. This data set is highly curated and contains metadata for one unique asset per blockchain.
{% enddocs %}
{% docs prices_fact_prices_ohlc_hourly_table_doc %}
A comprehensive fact table holding id and provider specific open, high, low, close hourly prices, from multiple providers. This data set includes raw, non-transformed data coming directly from the provider APIs and rows are not intended to be unique. As a result, there may be data quality issues persisting in the APIs that flow through to this fact based model. If you are interested in using a curated data set instead, please utilize ez_prices_hourly.
{% enddocs %}
{% docs prices_ez_prices_hourly_table_doc %}
A convenience table for determining token prices by address and blockchain, and native asset prices by symbol and blockchain. This data set is highly curated and contains metadata for one price per hour per unique asset and blockchain.
{% enddocs %}
{% docs prices_provider %}
The provider or source of the data.
{% enddocs %}
{% docs prices_asset_id %}
The unique identifier representing the asset.
{% enddocs %}
{% docs prices_name %}
The name of the asset.
{% enddocs %}
{% docs prices_symbol %}
The symbol of the asset.
{% enddocs %}
{% docs prices_token_address %}
The specific address representing the asset on a specific platform. This will be NULL if referring to a native asset.
{% enddocs %}
{% docs prices_blockchain %}
The Blockchain, Network, or Platform for this asset.
{% enddocs %}
{% docs prices_blockchain_id %}
The unique identifier of the Blockchain, Network, or Platform for this asset.
{% enddocs %}
{% docs prices_decimals %}
The number of decimals for the asset. May be NULL.
{% enddocs %}
{% docs prices_is_native %}
A flag indicating assets native to the respective blockchain.
{% enddocs %}
{% docs prices_is_deprecated %}
A flag indicating if the asset is deprecated or no longer supported by the provider.
{% enddocs %}
{% docs prices_id_deprecation %}
Deprecating soon! Please use the `asset_id` column instead.
{% enddocs %}
{% docs prices_decimals_deprecation %}
Deprecating soon! Please use the decimals column in `ez_asset_metadata` or join in `dim_contracts` instead.
{% enddocs %}
{% docs prices_hour %}
Hour that the price was recorded at.
{% enddocs %}
{% docs prices_price %}
Closing price of the recorded hour in USD.
{% enddocs %}
{% docs prices_is_imputed %}
A flag indicating if the price was imputed, or derived, from the last arriving record. This is generally used for tokens with low-liquidity or inconsistent reporting.
{% enddocs %}
{% docs prices_open %}
Opening price of the recorded hour in USD.
{% enddocs %}
{% docs prices_high %}
Highest price of the recorded hour in USD
{% enddocs %}
{% docs prices_low %}
Lowest price of the recorded hour in USD
{% enddocs %}
{% docs prices_close %}
Closing price of the recorded hour in USD
{% enddocs %}

@ -0,0 +1,5 @@
{% docs sk %}
The surrogate key for the table. Will be unique and is used as a foreign key in other tables.
{% enddocs %}

@ -0,0 +1,5 @@
{% docs to_address %}
The receiving address for this event
{% enddocs %}

@ -0,0 +1,5 @@
{% docs tx_id %}
The unique transaction id
{% enddocs %}

@ -0,0 +1,86 @@
{{ config(
materialized = 'incremental',
unique_key = 'dim_block_id',
incremental_strategy = 'merge',
incremental_predicates = ['DBT_INTERNAL_DEST.block_id >= (select min(block_id) from ' ~ generate_tmp_view_name(this) ~ ')'],
cluster_by = ['block_timestamp::DATE']
) }}
SELECT
{{ dbt_utils.generate_surrogate_key(
['height']
) }} AS dim_block_id,
height AS block_id,
block_timestamp,
block_date,
block_hour,
block_week,
block_month,
block_quarter,
block_year,
block_DAYOFMONTH,
block_DAYOFWEEK,
block_DAYOFYEAR,
TIMESTAMP,
HASH,
agg_state,
_INSERTED_TIMESTAMP,
'{{ invocation_id }}' AS _invocation_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
{{ ref('silver__block_log') }}
{% if is_incremental() %}
WHERE
block_id >= (
SELECT
MAX(
block_id - 600 --about 1 hour
)
FROM
{{ this }}
)
UNION ALL
SELECT
'-1' AS dim_block_id,
-1 AS block_id,
'1900-01-01' :: datetime AS block_timestamp,
NULL AS block_date,
NULL AS block_hour,
NULL AS block_week,
NULL AS block_month,
NULL AS block_quarter,
NULL AS block_year,
NULL AS block_DAYOFMONTH,
NULL AS block_DAYOFWEEK,
NULL AS block_DAYOFYEAR,
NULL AS TIMESTAMP,
NULL AS HASH,
NULL AS agg_state,
'1900-01-01' :: DATE AS _inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id,
'1900-01-01' :: DATE AS inserted_timestamp,
'1900-01-01' :: DATE AS modified_timestamp
UNION ALL
SELECT
'-2' AS dim_block_id,
-2 AS block_id,
NULL AS block_timestamp,
NULL AS block_date,
NULL AS block_hour,
NULL AS block_week,
NULL AS block_month,
NULL AS block_quarter,
NULL AS block_year,
NULL AS block_DAYOFMONTH,
NULL AS block_DAYOFWEEK,
NULL AS block_DAYOFYEAR,
NULL AS TIMESTAMP,
NULL AS HASH,
NULL AS agg_state,
'1900-01-01' :: DATE AS _inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id,
'1900-01-01' :: DATE AS inserted_timestamp,
'1900-01-01' :: DATE AS modified_timestamp
{% endif %}

@ -0,0 +1,68 @@
version: 2
models:
- name: core__dim_block
description: "Records of all blocks that have occurred on Thorchain, dating back to the genesis block. "
columns:
- name: DIM_BLOCK_ID
description: "{{ doc('sk') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- name: BLOCK_DATE
description: "{{ doc('block_date') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: BLOCK_HOUR
description: "{{ doc('block_date') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: BLOCK_WEEK
description: "{{ doc('block_date') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: BLOCK_MONTH
description: "The numeric month of block minting(without a timezone)"
- name: BLOCK_QUARTER
description: "The numeric quarter of block minting(without a timezone)"
- name: BLOCK_YEAR
description: "The numeric year of block minting(without a timezone)"
- name: BLOCK_DAYOFMONTH
description: "The numeric day of month of block minting(without a timezone)"
- name: BLOCK_DAYOFWEEK
description: "The numeric day of week of block minting(without a timezone)"
- name: BLOCK_DAYOFYEAR
description: "The numeric day of year of block minting(without a timezone)"
- name: TIMESTAMP
description: "integer value of the block_teimstamp"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: HASH
description: "block hash"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: agg_state
description: ""
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_constraints.primary_key:
column_name: DIM_BLOCK_ID

@ -0,0 +1,6 @@
{{ config(
materialized = 'view'
) }}
SELECT
'2.10.0' AS midgard_version

@ -0,0 +1,77 @@
{{ config(
materialized = 'incremental',
meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEX, AMM' }} },
unique_key = 'fact_mayaname_change_events_id',
incremental_strategy = 'merge',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp >= (select min(block_timestamp) from ' ~ generate_tmp_view_name(this) ~ ')'],
cluster_by = ['block_timestamp::DATE']
) }}
WITH base AS (
SELECT
NAME,
chain,
address,
registration_fee_e8,
fund_amount_e8,
height,
expire,
owner,
tx_id,
memo,
sender,
preferred_asset,
affiliate_bps,
sub_affiliates,
event_id,
block_timestamp,
_INSERTED_TIMESTAMP
FROM
{{ ref('silver__mayaname_change_events') }}
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['a.event_id','a.name']
) }} AS fact_mayaname_change_events_id,
b.block_timestamp,
COALESCE(
b.dim_block_id,
'-1'
) AS dim_block_id,
NAME,
chain,
address,
registration_fee_e8,
fund_amount_e8,
height,
expire,
owner,
tx_id,
memo,
sender,
preferred_asset,
affiliate_bps,
sub_affiliates,
event_id,
A._INSERTED_TIMESTAMP,
'{{ invocation_id }}' AS _invocation_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
base A
JOIN {{ ref('core__dim_block') }}
b
ON A.block_timestamp = b.timestamp
{% if is_incremental() %}
WHERE
b.block_timestamp >= (
SELECT
MAX(
block_timestamp - INTERVAL '1 HOUR'
)
FROM
{{ this }}
)
{% endif %}

@ -0,0 +1,45 @@
version: 2
models:
- name: core__fact_mayaname_change_events
description: "Fact table that shows name change events"
columns:
- name: FACT_MAYANAME_CHANGE_EVENTS_ID
description: "{{ doc('sk') }}"
tests:
- dbt_expectations.expect_column_to_exist
- unique
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: DIM_BLOCK_ID
description: "FK to DIM_BLOCK table"
tests:
- negative_one:
where: _inserted_timestamp < (CURRENT_TIMESTAMP - INTERVAL '8 HOURS')
- name: OWNER
description: "The owner address of the MAYAName"
- name: CHAIN
description: "{{ doc('blockchain') }}"
- name: ADDRESS
description: "{{ doc('address') }}"
- name: EXPIRE
description: "The block height at which the MAYAName registration expires"
- name: NAME
description: "The MAYAName being registered or changed"
- name: FUND_AMOUNT_E8
description: "The funded amount, in 1e8 base units"
- name: REGISTRATION_FEE_E8
description: "The registration fee, in 1e8 base units"
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_constraints.primary_key:
column_name: FACT_MAYANAME_CHANGE_EVENTS_ID
- dbt_constraints.foreign_key:
fk_column_name: DIM_BLOCK_ID
pk_table_name: ref('core__dim_block')
pk_column_name: DIM_BLOCK_ID
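For reference, `dbt_constraints.foreign_key` validates the relationship as a test and then creates a matching (informational, not enforced) constraint on Snowflake, roughly equivalent to the following (constraint name illustrative):

ALTER TABLE core.fact_mayaname_change_events
  ADD CONSTRAINT fk_fact_mayaname_change_events_dim_block_id
  FOREIGN KEY (dim_block_id) REFERENCES core.dim_block (dim_block_id);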

View File

@ -0,0 +1,52 @@
{{ config(
materialized = 'incremental',
meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEX, AMM' }} },
unique_key = 'fact_set_mimir_events_id',
incremental_strategy = 'merge',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp >= (select min(block_timestamp) from ' ~ generate_tmp_view_name(this) ~ ')'],
cluster_by = ['block_timestamp::DATE']
) }}
WITH base AS (
SELECT
key,
VALUE,
event_id,
block_timestamp,
_INSERTED_TIMESTAMP
FROM
{{ ref('silver__set_mimir_events') }}
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['a.event_id','a.key','a.block_timestamp']
) }} AS fact_set_mimir_events_id,
b.block_timestamp,
COALESCE(
b.dim_block_id,
'-1'
) AS dim_block_id,
key,
VALUE,
A._INSERTED_TIMESTAMP,
'{{ invocation_id }}' AS _invocation_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
base A
JOIN {{ ref('core__dim_block') }}
b
ON A.block_timestamp = b.timestamp
{% if is_incremental() %}
WHERE
b.block_timestamp >= (
SELECT
MAX(
block_timestamp - INTERVAL '1 HOUR'
)
FROM
{{ this }}
)
{% endif %}
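For reference, `dbt_utils.generate_surrogate_key(['a.event_id','a.key','a.block_timestamp'])` expands, in recent dbt_utils versions, to roughly the following hash of the dash-joined, null-safe column casts:

MD5(
  COALESCE(CAST(a.event_id AS VARCHAR), '_dbt_utils_surrogate_key_null_')
  || '-' ||
  COALESCE(CAST(a.key AS VARCHAR), '_dbt_utils_surrogate_key_null_')
  || '-' ||
  COALESCE(CAST(a.block_timestamp AS VARCHAR), '_dbt_utils_surrogate_key_null_')
)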

View File

@ -0,0 +1,35 @@
version: 2
models:
- name: core__fact_set_mimir_events
description: "Fact table that shows set mimir events"
columns:
- name: FACT_SET_MIMIR_EVENTS_ID
description: "{{ doc('sk') }}"
tests:
- dbt_expectations.expect_column_to_exist
- unique
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: DIM_BLOCK_ID
description: "FK to DIM_BLOCK table"
tests:
- negative_one:
where: _inserted_timestamp < (CURRENT_TIMESTAMP - INTERVAL '8 HOURS')
- name: KEY
description: "The mimir key being set"
- name: VALUE
description: "The value assigned to the mimir key"
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_constraints.primary_key:
column_name: FACT_SET_MIMIR_EVENTS_ID
- dbt_constraints.foreign_key:
fk_column_name: DIM_BLOCK_ID
pk_table_name: ref('core__dim_block')
pk_column_name: DIM_BLOCK_ID
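The `negative_one` test used on DIM_BLOCK_ID above is a custom generic test defined elsewhere in the project. A minimal sketch of such a test (hypothetical, for illustration only) could look like:

{% test negative_one(model, column_name) %}
-- Fail on rows still carrying the '-1' sentinel key; the `where:` config
-- in the yml gives newly inserted rows an 8-hour grace period.
SELECT *
FROM {{ model }}
WHERE {{ column_name }} = '-1'
{% endtest %}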

View File

@ -0,0 +1,56 @@
{{ config(
materialized = 'incremental',
meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEX, AMM' }} },
unique_key = 'fact_transfer_events_id',
incremental_strategy = 'merge',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp >= (select min(block_timestamp) from ' ~ generate_tmp_view_name(this) ~ ')'],
cluster_by = ['block_timestamp::DATE']
) }}
WITH base AS (
SELECT
from_address,
to_address,
asset,
amount_e8,
event_id,
block_timestamp,
_INSERTED_TIMESTAMP
FROM
{{ ref('silver__transfer_events') }}
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['a.event_id', 'a.from_address', 'a.to_address', 'a.asset', 'a.amount_e8']
) }} AS fact_transfer_events_id,
b.block_timestamp,
COALESCE(
b.dim_block_id,
'-1'
) AS dim_block_id,
from_address,
to_address,
asset,
amount_e8,
A._inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
base A
JOIN {{ ref('core__dim_block') }}
b
ON A.block_timestamp = b.timestamp
{% if is_incremental() %}
WHERE
b.block_timestamp >= (
SELECT
MAX(
block_timestamp - INTERVAL '1 HOUR'
)
FROM
{{ this }}
)
{% endif %}

View File

@ -0,0 +1,48 @@
version: 2
models:
- name: core__fact_transfer_events
description: "Fact table containing stake events"
columns:
- name: FACT_TRANSFER_EVENTS_ID
description: "{{ doc('sk') }}"
tests:
- dbt_expectations.expect_column_to_exist
- unique
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: DIM_BLOCK_ID
description: "FK to DIM_BLOCK table"
tests:
- negative_one:
where: _inserted_timestamp < (CURRENT_TIMESTAMP - INTERVAL '8 HOURS')
- name: FROM_ADDRESS
description: "{{ doc('address') }}"
tests:
- not_null
- name: TO_ADDRESS
description: "{{ doc('address') }}"
tests:
- not_null
- name: ASSET
description: "{{ doc('asset') }}"
tests:
- not_null
- name: AMOUNT_E8
description: "The asset amount for this event"
tests:
- not_null
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_constraints.primary_key:
column_name: FACT_TRANSFER_EVENTS_ID
- dbt_constraints.foreign_key:
fk_column_name: DIM_BLOCK_ID
pk_table_name: ref('core__dim_block')
pk_column_name: DIM_BLOCK_ID

View File

@ -0,0 +1,58 @@
{{ config(
materialized = 'incremental',
meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEX, AMM' }} },
unique_key = 'fact_transfers_id',
incremental_strategy = 'merge',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp >= (select min(block_timestamp) from ' ~ generate_tmp_view_name(this) ~ ')'],
cluster_by = ['block_timestamp::DATE']
) }}
WITH base AS (
SELECT
block_id,
from_address,
to_address,
asset,
cacao_amount,
cacao_amount_usd,
_unique_key,
_INSERTED_TIMESTAMP
FROM
{{ ref('silver__transfers') }}
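-- Unlike the other fact models, the incremental filter here is applied to
-- the silver source directly, and the dim_block join below is on block_id
-- rather than block_timestamp.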
{% if is_incremental() %}
WHERE
block_timestamp >= (
SELECT
MAX(
block_timestamp - INTERVAL '1 HOUR'
)
FROM
{{ this }}
)
{% endif %}
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['a._unique_key']
) }} AS fact_transfers_id,
b.block_timestamp,
COALESCE(
b.dim_block_id,
'-1'
) AS dim_block_id,
from_address,
to_address,
asset,
cacao_amount,
cacao_amount_usd,
A._inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
base A
JOIN {{ ref('core__dim_block') }}
b
ON A.block_id = b.block_id

View File

@ -0,0 +1,52 @@
version: 2
models:
- name: core__fact_transfers
description: "Fact table shows the transfer action between different address"
columns:
- name: FACT_TRANSFERS_ID
description: "{{ doc('sk') }}"
tests:
- dbt_expectations.expect_column_to_exist
- unique
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: DIM_BLOCK_ID
description: "FK to DIM_BLOCK table"
tests:
- negative_one:
where: _inserted_timestamp < (CURRENT_TIMESTAMP - INTERVAL '8 HOURS')
- name: FROM_ADDRESS
description: "{{ doc('address') }}"
tests:
- not_null
- name: TO_ADDRESS
description: "{{ doc('address') }}"
tests:
- not_null
- name: ASSET
description: "{{ doc('asset') }}"
tests:
- not_null
- name: CACAO_AMOUNT
description: "The transferred cacao amount"
tests:
- not_null
- name: CACAO_AMOUNT_USD
description: "The transferred cacao amount in USD"
tests:
- not_null
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_constraints.primary_key:
column_name: FACT_TRANSFERS_ID
- dbt_constraints.foreign_key:
fk_column_name: DIM_BLOCK_ID
pk_table_name: ref('core__dim_block')
pk_column_name: DIM_BLOCK_ID

View File

@ -0,0 +1,50 @@
{{ config(
materialized = 'incremental',
meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEX, AMM' }} },
unique_key = 'fact_active_vault_events_id',
incremental_strategy = 'merge',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp >= (select min(block_timestamp) from ' ~ generate_tmp_view_name(this) ~ ')'],
cluster_by = ['block_timestamp::DATE']
) }}
WITH base AS (
SELECT
block_timestamp,
add_asgard_addr,
event_id,
_inserted_timestamp
FROM
{{ ref('silver__active_vault_events') }}
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['a.event_id','a.block_timestamp','a.add_asgard_addr']
) }} AS fact_active_vault_events_id,
b.block_timestamp,
COALESCE(
b.dim_block_id,
'-1'
) AS dim_block_id,
add_asgard_addr,
A._inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
base A
JOIN {{ ref('core__dim_block') }}
b
ON A.block_timestamp = b.timestamp
{% if is_incremental() %}
WHERE
b.block_timestamp >= (
SELECT
MAX(
block_timestamp - INTERVAL '1 HOUR'
)
FROM
{{ this }}
)
{% endif %}

View File

@ -0,0 +1,35 @@
version: 2
models:
- name: defi__fact_active_vault_events
description: "Fact table containing the events triggered by the churning activities"
columns:
- name: FACT_ACTIVE_VAULT_EVENTS_ID
description: "{{ doc('sk') }}"
tests:
- dbt_expectations.expect_column_to_exist
- unique
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: DIM_BLOCK_ID
description: "FK to DIM_BLOCK table"
tests:
- negative_one:
where: _inserted_timestamp < (CURRENT_TIMESTAMP - INTERVAL '8 HOURS')
- name: ADD_ASGARD_ADDR
description: "The asgard address added to the vault"
tests:
- not_null
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_constraints.primary_key:
column_name: FACT_ACTIVE_VAULT_EVENTS_ID
- dbt_constraints.foreign_key:
fk_column_name: DIM_BLOCK_ID
pk_table_name: ref('core__dim_block')
pk_column_name: DIM_BLOCK_ID

View File

@ -0,0 +1,75 @@
{{ config(
materialized = 'incremental',
meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEX, AMM' }} },
unique_key = 'fact_add_events_id',
incremental_strategy = 'merge',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp >= (select min(block_timestamp) from ' ~ generate_tmp_view_name(this) ~ ')'],
cluster_by = ['block_timestamp::DATE']
) }}
WITH base AS (
SELECT
e.block_timestamp,
e.tx_id,
e.cacao_e8,
e.blockchain,
e.asset_e8,
e.pool_name,
e.memo,
e.to_address,
e.from_address,
e.asset,
e.event_id,
_inserted_timestamp
FROM
{{ ref('silver__add_events') }}
e
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['a.event_id','a.tx_id','a.blockchain','a.from_address','a.to_address','a.asset','a.memo','a.block_timestamp']
) }} AS fact_add_events_id,
b.block_timestamp,
COALESCE(
b.dim_block_id,
'-1'
) AS dim_block_id,
A.tx_id,
A.cacao_e8,
A.blockchain,
A.asset_e8,
A.pool_name,
A.memo,
A.to_address,
A.from_address,
A.asset,
A._inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
base A
JOIN {{ ref('core__dim_block') }}
b
ON A.block_timestamp = b.timestamp
{% if is_incremental() %}
WHERE
b.block_timestamp >= (
SELECT
MAX(
block_timestamp - INTERVAL '1 HOUR'
)
FROM
{{ this }}
)
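-- Also re-process rows previously loaded with the '-1' sentinel
-- dim_block_id so they pick up the real block key once it exists.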
OR tx_id IN (
SELECT
tx_id
FROM
{{ this }}
WHERE
dim_block_id = '-1'
)
{% endif %}

View File

@ -0,0 +1,58 @@
version: 2
models:
- name: defi__fact_add_events
description: "Fact table containing add events"
columns:
- name: FACT_ADD_EVENTS_ID
description: "{{ doc('sk') }}"
tests:
- dbt_expectations.expect_column_to_exist
- unique
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: DIM_BLOCK_ID
description: "FK to DIM_BLOCK table"
tests:
- negative_one:
where: _inserted_timestamp < (CURRENT_TIMESTAMP - INTERVAL '8 HOURS')
- name: CACAO_E8
description: "The amount of cacao for this add event"
tests:
- not_null
- name: BLOCKCHAIN
description: "{{ doc('blockchain') }}"
tests:
- not_null
- name: ASSET_E8
description: "The asset amount for this add event"
tests:
- not_null
- name: POOL_NAME
description: "{{ doc('pool_name') }}"
tests:
- not_null
- name: MEMO
description: "{{ doc('memo') }}"
tests:
- not_null
- name: TO_ADDRESS
description: "{{ doc('to_address') }}"
tests:
- not_null
- name: FROM_ADDRESS
description: "{{ doc('from_address') }}"
tests:
- not_null
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_constraints.primary_key:
column_name: FACT_ADD_EVENTS_ID
- dbt_constraints.foreign_key:
fk_column_name: DIM_BLOCK_ID
pk_table_name: ref('core__dim_block')
pk_column_name: DIM_BLOCK_ID

View File

@ -0,0 +1,63 @@
{{ config(
materialized = 'incremental',
meta ={ 'database_tags':{ 'table':{ 'PURPOSE': 'DEX, AMM' }} },
unique_key = 'fact_pool_depths_id',
incremental_strategy = 'merge',
incremental_predicates = ['DBT_INTERNAL_DEST.block_timestamp >= (select min(block_timestamp) from ' ~ generate_tmp_view_name(this) ~ ')'],
cluster_by = ['block_timestamp::DATE']
) }}
WITH base AS (
SELECT
pool_name,
asset_e8,
cacao_e8,
synth_e8,
block_timestamp,
_inserted_timestamp
FROM
{{ ref('silver__block_pool_depths') }}
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['a.pool_name','a.block_timestamp']
) }} AS fact_pool_depths_id,
b.block_timestamp,
COALESCE(
b.dim_block_id,
'-1'
) AS dim_block_id,
cacao_e8,
asset_e8,
synth_e8,
pool_name,
A._inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
base A
JOIN {{ ref('core__dim_block') }}
b
ON A.block_timestamp = b.timestamp
{% if is_incremental() %}
WHERE
b.block_timestamp >= (
SELECT
MAX(
block_timestamp - INTERVAL '1 HOUR'
)
FROM
{{ this }}
)
OR pool_name IN (
SELECT
pool_name
FROM
{{ this }}
WHERE
dim_block_id = '-1'
)
{% endif %}

View File

@ -0,0 +1,47 @@
version: 2
models:
- name: defi__fact_block_pool_depths
description: "Fact table containing all the available pools and its cacao/asset depth at each block interval"
columns:
- name: FACT_POOL_DEPTHS_ID
description: "{{ doc('sk') }}"
tests:
- dbt_expectations.expect_column_to_exist
- unique
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null:
where: DIM_BLOCK_ID not in ('-1','-2')
- name: DIM_BLOCK_ID
description: "FK to DIM_BLOCK table"
tests:
- negative_one:
where: _inserted_timestamp < (CURRENT_TIMESTAMP - INTERVAL '8 HOURS')
- name: CACAO_E8
description: "The cacao depth for this pool at this block"
tests:
- not_null
- name: ASSET_E8
description: "The asset depth for this pool at this block"
tests:
- not_null
- name: SYNTH_E8
description: ""
tests:
- not_null
- name: POOL_NAME
description: "{{ doc('pool_name') }}"
tests:
- not_null
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'
tests:
- dbt_constraints.primary_key:
column_name: FACT_POOL_DEPTHS_ID
- dbt_constraints.foreign_key:
fk_column_name: DIM_BLOCK_ID
pk_table_name: ref('core__dim_block')
pk_column_name: DIM_BLOCK_ID

Some files were not shown because too many files have changed in this diff