mirror of https://github.com/FlipsideCrypto/movement-models.git — synced 2026-02-06 11:46:43 +00:00
repo init
This commit is contained in: parent 5453a0af50, commit 418e4cfca0
18 .gitignore vendored Normal file
@@ -0,0 +1,18 @@
```
target/
dbt_modules/
# newer versions of dbt use this directory instead of dbt_modules for installed packages
dbt_packages/
logs/

.venv/
.python-version

# Visual Studio Code files
*/.vscode
*.code-workspace
.history/
**/.DS_Store
.vscode/
.env
.DS_Store
.user.yml
```
75 README.md Normal file
@@ -0,0 +1,75 @@
## Profile Set Up

#### Use the following within profiles.yml

----

```yml
movement:
  target: dev
  outputs:
    dev:
      type: snowflake
      account: <ACCOUNT>
      role: <ROLE>
      user: <USERNAME>
      password: <PASSWORD>
      region: <REGION>
      database: movement_dev
      warehouse: <WAREHOUSE>
      schema: silver
      threads: 4
      client_session_keep_alive: False
      query_tag: <TAG>
```

### Resources:

- Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction)
- Check out [Discourse](https://discourse.getdbt.com/) for commonly asked questions and answers
- Join the [chat](https://community.getdbt.com/) on Slack for live discussions and support
- Find [dbt events](https://events.getdbt.com) near you
- Check out [the blog](https://blog.getdbt.com/) for the latest news on dbt's development and best practices

## Applying Model Tags

### Database / Schema level tags

Database and schema tags are applied via the `add_database_or_schema_tags` macro. These tags are inherited by their downstream objects. To add or modify tags, call the appropriate tag-set function within the macro.

```
{{ set_database_tag_value('SOME_DATABASE_TAG_KEY','SOME_DATABASE_TAG_VALUE') }}
{{ set_schema_tag_value('SOME_SCHEMA_TAG_KEY','SOME_SCHEMA_TAG_VALUE') }}
```

### Model tags

To add or update a model's Snowflake tags, add or modify the `meta` model property under `config`. Only table-level tags are supported via dbt at this time.

```
{{ config(
    ...,
    meta={
        'database_tags': {
            'table': {
                'PURPOSE': 'SOME_PURPOSE'
            }
        }
    },
    ...
) }}
```

By default, model tags are not pushed to Snowflake on each load. You can push a tag update for a model by passing the `UPDATE_SNOWFLAKE_TAGS` project variable during a run.

```
dbt run --vars '{"UPDATE_SNOWFLAKE_TAGS": True}' -s models/core/core__fact_swaps.sql
```

### Querying for existing tags on a model in Snowflake

```
select *
from table(movement.information_schema.tag_references('movement.core.fact_blocks', 'table'));
```
0 analyses/.gitkeep Normal file
0 data/.gitkeep Normal file
6 data/github_actions__workflows.csv Normal file
@@ -0,0 +1,6 @@
```
workflow_name,workflow_schedule
dbt_run_streamline_blocks_tx_realtime,"0,20,40 * * * *"
dbt_run_streamline_transactions_realtime,"15,55 * * * *"
dbt_run_incremental_core,"15,35,55 * * * *"
dbt_test_intraday,"20 */4 * * *"
dbt_test_tasks,"0,30 * * * *"
```
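Each `workflow_schedule` is a standard five-field cron expression: `"0,20,40 * * * *"`, for example, fires the blocks/transactions workflow at minutes 0, 20, and 40 of every hour, and GitHub Actions evaluates these schedules in UTC.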
113 dbt_project.yml Normal file
@@ -0,0 +1,113 @@
```yml
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "movement_models"
version: "1.0.0"
config-version: 2

require-dbt-version: ">=1.9.0"

# This setting configures which "profile" dbt uses for this project.
profile: "movement"

# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
# found in the "models/" directory. You probably won't need to change these!
model-paths: ["models"]
analysis-paths: ["analysis"]
test-paths: ["tests"]
seed-paths: ["data"]
macro-paths: ["macros"]
snapshot-paths: ["snapshots"]

target-path: "target" # directory which will store compiled SQL files
clean-targets: # directories to be removed by `dbt clean`
  - "target"
  - "dbt_modules"
  - "dbt_packages"

tests:
  +store_failures: true # all tests

on-run-start:
  - "{{ create_sps() }}"
  - "{{ create_udfs() }}"

on-run-end:
  - '{{ apply_meta_as_tags(results) }}'

dispatch:
  - macro_namespace: dbt
    search_order:
      - movement-models
      - dbt_snowflake_query_tags
      - dbt

query-comment:
  comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
  append: true # Snowflake removes prefixed comments.

# Configuring models
# Full documentation: https://docs.getdbt.com/docs/configuring-models

models:
  +copy_grants: true
  +persist_docs:
    relation: true
    columns: true
  +on_schema_change: "append_new_columns"

  movement_models:
    gold: # This will apply to ALL models under the gold directory
      +tests:
        - dbt_utils.recency:
            datepart: hour
            field: BLOCK_TIMESTAMP
            interval: 3
            severity: error
            tags: ['recency']

# In this example config, we tell dbt to build all models in the example/ directory
# as tables. These settings can be overridden in the individual model files
# using the `{{ config(...) }}` macro.

vars:
  "dbt_date:time_zone": GMT
  STREAMLINE_INVOKE_STREAMS: false
  STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: false
  UPDATE_UDFS_AND_SPS: false
  UPDATE_SNOWFLAKE_TAGS: True
  OBSERV_FULL_TEST: false
  START_GHA_TASKS: false
  BRONZE_LOOKBACK_DAYS: '{{ env_var("BRONZE_LOOKBACK_DAYS", 3) }}'

  #### STREAMLINE 2.0 BEGIN ####

  API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
  EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
  ROLES: '{{ var("config")[target.name]["ROLES"] }}'

  config:
    # The keys correspond to dbt profiles and are case sensitive
    dev:
      API_INTEGRATION: aws_movement_api_stg_v2
      EXTERNAL_FUNCTION_URI: qjj5rutl05.execute-api.us-east-1.amazonaws.com/stg/
      ROLES:
        - AWS_LAMBDA_MOVEMENT_API
        - INTERNAL_DEV

    dev-2xl:
      API_INTEGRATION: aws_movement_api_stg_v2
      EXTERNAL_FUNCTION_URI: qjj5rutl05.execute-api.us-east-1.amazonaws.com/stg/
      ROLES:
        - AWS_LAMBDA_MOVEMENT_API
        - INTERNAL_DEV

    prod:
      API_INTEGRATION: aws_movement_api_prod_v2
      EXTERNAL_FUNCTION_URI: d0t060jjxf.execute-api.us-east-1.amazonaws.com/prod/
      ROLES:
        - AWS_LAMBDA_MOVEMENT_API
        - INTERNAL_DEV
        - DBT_CLOUD_MOVEMENT

  #### STREAMLINE 2.0 END ####
```
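These vars are meant to be flipped per run rather than edited in the file. A minimal sketch; the selector and the values are illustrative, not prescribed by this commit:

```
# BRONZE_LOOKBACK_DAYS falls back to 3 when the env var is unset
export BRONZE_LOOKBACK_DAYS=7
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS": true}' -s models/gold
```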
0 docs/.gitkeep Normal file
0 macros/.gitkeep Normal file
6 macros/create_sps.sql Normal file
@@ -0,0 +1,6 @@
```sql
{# Runs on-run-start: in the prod database, ensures the _internal schema exists
   and (re)creates the prod-clone stored procedure. #}
{% macro create_sps() %}
    {% if target.database == 'MOVEMENT' %}
        CREATE SCHEMA IF NOT EXISTS _internal;
        {{ sp_create_prod_clone('_internal') }};
    {% endif %}
{% endmacro %}
```
10 macros/create_udfs.sql Normal file
@@ -0,0 +1,10 @@
```sql
{# Runs on-run-start: (re)creates the streamline external function when the
   UPDATE_UDFS_AND_SPS var is enabled. #}
{% macro create_udfs() %}
    {% if var("UPDATE_UDFS_AND_SPS") %}
        {% set sql %}
            CREATE SCHEMA IF NOT EXISTS silver;
            {{ create_udf_bulk_rest_api_v2() }}
        {% endset %}
        {% do run_query(sql) %}
    {% endif %}
{% endmacro %}
```
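Because `UPDATE_UDFS_AND_SPS` defaults to `false` in `dbt_project.yml`, this macro is a no-op until the var is switched on for a run:

```
dbt run --vars '{"UPDATE_UDFS_AND_SPS": true}'
```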
23 macros/custom_naming_macros.sql Normal file
@@ -0,0 +1,23 @@
```sql
{# Override dbt's built-in naming: the text before '__' in the model file name
   becomes the schema, the text after it becomes the relation alias. #}
{% macro generate_schema_name(
        custom_schema_name = none,
        node = none
    ) -%}
    {% set node_name = node.name %}
    {% set split_name = node_name.split('__') %}
    {{ split_name[0] | trim }}
{%- endmacro %}


{% macro generate_alias_name(
        custom_alias_name = none,
        node = none
    ) -%}
    {% set node_name = node.name %}
    {% set split_name = node_name.split('__') %}
    {{ split_name[1] | trim }}
{%- endmacro %}


{% macro generate_tmp_view_name(model_name) -%}
    {% set node_name = model_name.name %}
    {% set split_name = node_name.split('__') %}
    {{ target.database ~ '.' ~ split_name[0] ~ '.' ~ split_name[1] ~ '__dbt_tmp' | trim }}
{%- endmacro %}
```
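With these overrides in place, the double-underscore prefix of a model's file name, not its directory, determines where it lands. For example, assuming the `movement_dev` database from the README (the second path is illustrative):

```
-- models/core/core__fact_swaps.sql   ->  movement_dev.core.fact_swaps
-- models/silver/silver__blocks.sql   ->  movement_dev.silver.blocks
```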
11 macros/custom_query_tag.sql Normal file
@@ -0,0 +1,11 @@
```sql
{% macro set_query_tag() -%}
    {% set new_json = {"repo": project_name, "object": this.table, "profile": target.profile_name, "env": target.name, "existing_tag": get_current_query_tag()} %}
    {% set new_query_tag = tojson(new_json) | as_text %}
    {% if new_query_tag %}
        {% set original_query_tag = get_current_query_tag() %}
        {{ log("Setting query_tag to '" ~ new_query_tag ~ "'. Will reset to '" ~ original_query_tag ~ "' after materialization.") }}
        {% do run_query("alter session set query_tag = '{}'".format(new_query_tag)) %}
        {{ return(original_query_tag) }}
    {% endif %}
    {{ return(none) }}
{% endmacro %}
```
16 macros/dbt/get_merge_sql.sql Normal file
@@ -0,0 +1,16 @@
```sql
{# Shim: delegate incremental merge generation to the fsc_utils implementation. #}
{% macro get_merge_sql(
        target,
        source,
        unique_key,
        dest_columns,
        incremental_predicates
    ) -%}
    {% set merge_sql = fsc_utils.get_merge_sql(
        target,
        source,
        unique_key,
        dest_columns,
        incremental_predicates
    ) %}
    {{ return(merge_sql) }}
{% endmacro %}
```
8 macros/dbt/get_tmp_relation_type.sql Normal file
@@ -0,0 +1,8 @@
```sql
{% macro dbt_snowflake_get_tmp_relation_type(
        strategy,
        unique_key,
        language
    ) %}
    -- always table
    {{ return('table') }}
{% endmacro %}
```
10 macros/run_sp_create_prod_clone.sql Normal file
@@ -0,0 +1,10 @@
```sql
{% macro run_sp_create_prod_clone() %}
    {% set clone_query %}
        call movement._internal.create_prod_clone(
            'movement',
            'movement_dev',
            'internal_dev'
        );
    {% endset %}
    {% do run_query(clone_query) %}
{% endmacro %}
```
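Nothing else in this commit appears to call this wrapper, so it would be invoked ad hoc to refresh `movement_dev` from prod; a sketch using dbt's standard operation runner:

```
dbt run-operation run_sp_create_prod_clone
```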
44 macros/sp_create_prod_clone.sql Normal file
@@ -0,0 +1,44 @@
```sql
{% macro sp_create_prod_clone(target_schema) -%}

create or replace procedure {{ target_schema }}.create_prod_clone(source_db_name string, destination_db_name string, role_name string)
returns boolean
language javascript
execute as caller
as
$$
    snowflake.execute({sqlText: `BEGIN TRANSACTION;`});
    try {
        snowflake.execute({sqlText: `CREATE OR REPLACE DATABASE ${DESTINATION_DB_NAME} CLONE ${SOURCE_DB_NAME}`});
        snowflake.execute({sqlText: `DROP SCHEMA IF EXISTS ${DESTINATION_DB_NAME}._INTERNAL`}); /* this only needs to be in prod */

        snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL SCHEMAS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
        snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});

        snowflake.execute({sqlText: `GRANT OWNERSHIP ON DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});

        var existing_tags = snowflake.execute({sqlText: `SHOW TAGS IN DATABASE ${DESTINATION_DB_NAME};`});
        while (existing_tags.next()) {
            var schema = existing_tags.getColumnValue(4);
            var tag_name = existing_tags.getColumnValue(2);
            snowflake.execute({sqlText: `GRANT OWNERSHIP ON TAG ${DESTINATION_DB_NAME}.${schema}.${tag_name} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
        }

        snowflake.execute({sqlText: `COMMIT;`});
    } catch (err) {
        snowflake.execute({sqlText: `ROLLBACK;`});
        throw(err);
    }

    return true;
$$

{%- endmacro %}
```
21 macros/streamline/api_integrations.sql Normal file
@@ -0,0 +1,21 @@
```sql
{% macro create_aws_movement_api() %}
    {{ log(
        "Creating integration for target:" ~ target
    ) }}

    {% if target.name == "prod" %}
        {% set sql %}
            CREATE API INTEGRATION IF NOT EXISTS aws_movement_api_prod_v2
                api_provider = aws_api_gateway
                api_aws_role_arn = 'arn:aws:iam::924682671219:role/movement-api-prod-rolesnowflakeudfsAF733095-uYaV9o2uJLDU'
                api_allowed_prefixes = ('https://d0t060jjxf.execute-api.us-east-1.amazonaws.com/prod/')
                enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
    {% else %}
        {% set sql %}
            CREATE API INTEGRATION IF NOT EXISTS aws_movement_api_stg_v2
                api_provider = aws_api_gateway
                api_aws_role_arn = 'arn:aws:iam::704693948482:role/movement-api-stg-rolesnowflakeudfsAF733095-XfjrcIZRSZkR'
                api_allowed_prefixes = ('https://qjj5rutl05.execute-api.us-east-1.amazonaws.com/stg/')
                enabled = TRUE;
        {% endset %}
        {% do run_query(sql) %}
    {% endif %}
{% endmacro %}
```
70 macros/streamline/models.sql Normal file
@@ -0,0 +1,70 @@
```sql
{% macro streamline_external_table_query_v2(
        model,
        partition_function
    ) %}

    {% set days = var("BRONZE_LOOKBACK_DAYS") %}

    WITH meta AS (
        SELECT
            last_modified AS inserted_timestamp,
            file_name,
            {{ partition_function }} AS partition_key
        FROM
            TABLE(
                information_schema.external_table_file_registration_history(
                    start_time => DATEADD('day', -ABS({{ days }}), CURRENT_TIMESTAMP()),
                    table_name => '{{ source( "bronze_streamline", model) }}'
                )
            ) A
    )
    SELECT
        s.*,
        b.file_name,
        inserted_timestamp
    FROM
        {{ source(
            "bronze_streamline",
            model
        ) }} s
        JOIN meta b
        ON b.file_name = metadata$filename
        AND b.partition_key = s.partition_key
    WHERE
        b.partition_key = s.partition_key
        AND DATA :error IS NULL
{% endmacro %}


{% macro streamline_external_table_FR_query_v2(
        model,
        partition_function
    ) %}
    WITH meta AS (
        SELECT
            registered_on AS inserted_timestamp,
            file_name,
            {{ partition_function }} AS partition_key
        FROM
            TABLE(
                information_schema.external_table_files(
                    table_name => '{{ source( "bronze_streamline", model) }}'
                )
            ) A
    )
    SELECT
        s.*,
        b.file_name,
        inserted_timestamp
    FROM
        {{ source(
            "bronze_streamline",
            model
        ) }} s
        JOIN meta b
        ON b.file_name = metadata$filename
        AND b.partition_key = s.partition_key
    WHERE
        b.partition_key = s.partition_key
        AND DATA :error IS NULL
{% endmacro %}
```
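A sketch of how a bronze model might call the first macro; the file path, source table name, and partition expression below are illustrative assumptions, not values from this commit:

```sql
-- hypothetical models/bronze/bronze__transactions.sql
{{ config(materialized = 'view') }}

{{ streamline_external_table_query_v2(
    model = 'transactions',
    partition_function = "TO_NUMBER(SPLIT_PART(file_name, '/', 3))"
) }}
```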
10 macros/streamline/streamline_udfs.sql Normal file
@@ -0,0 +1,10 @@
```sql
{% macro create_udf_bulk_rest_api_v2() %}
    CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_rest_api_v2(
        json OBJECT
    )
    returns ARRAY
    api_integration = {% if target.name == "prod" %}
        aws_movement_api_prod_v2 AS 'https://d0t060jjxf.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_rest_api'
    {% else %}
        aws_movement_api_stg_v2 AS 'https://qjj5rutl05.execute-api.us-east-1.amazonaws.com/stg/udf_bulk_rest_api'
    {%- endif %};
{% endmacro %}
```
6 macros/tags/add_database_or_schema_tags.sql Normal file
@@ -0,0 +1,6 @@
```sql
{% macro add_database_or_schema_tags() %}
    {{ set_database_tag_value(
        'BLOCKCHAIN_NAME',
        'movement'
    ) }}
{% endmacro %}
```
127 macros/tags/snowflake_tagging.sql Normal file
@@ -0,0 +1,127 @@
```sql
{% macro apply_meta_as_tags(results) %}
    {% if var("UPDATE_SNOWFLAKE_TAGS") %}
        {{ log('apply_meta_as_tags', info=False) }}
        {{ log(results, info=False) }}
        {% if execute %}

            {%- set tags_by_schema = {} -%}
            {% for res in results -%}
                {% if res.node.meta.database_tags %}

                    {%- set model_database = res.node.database -%}
                    {%- set model_schema = res.node.schema -%}
                    {%- set model_schema_full = model_database + '.' + model_schema -%}
                    {%- set model_alias = res.node.alias -%}

                    {% if model_schema_full not in tags_by_schema.keys() %}
                        {{ log('need to fetch tags for schema ' + model_schema_full, info=False) }}
                        {%- call statement('main', fetch_result=True) -%}
                            show tags in {{ model_database }}.{{ model_schema }}
                        {%- endcall -%}
                        {%- set _ = tags_by_schema.update({model_schema_full: load_result('main')['table'].columns.get('name').values() | list}) -%}
                        {{ log('Added tags to cache', info=False) }}
                    {% else %}
                        {{ log('already have tag info for schema', info=False) }}
                    {% endif %}

                    {%- set current_tags_in_schema = tags_by_schema[model_schema_full] -%}
                    {{ log('current_tags_in_schema:', info=False) }}
                    {{ log(current_tags_in_schema, info=False) }}
                    {{ log("========== Processing tags for " + model_schema_full + "." + model_alias + " ==========", info=False) }}

                    {% set line -%}
                        node: {{ res.node.unique_id }}; status: {{ res.status }} (message: {{ res.message }})
                        node full: {{ res.node }}
                        meta: {{ res.node.meta }}
                        materialized: {{ res.node.config.materialized }}
                    {%- endset %}
                    {{ log(line, info=False) }}

                    {%- call statement('main', fetch_result=True) -%}
                        select LEVEL, UPPER(TAG_NAME) as TAG_NAME, TAG_VALUE from table(information_schema.tag_references_all_columns('{{ model_schema }}.{{ model_alias }}', 'table'))
                    {%- endcall -%}
                    {%- set existing_tags_for_table = load_result('main')['data'] -%}
                    {{ log('Existing tags for table:', info=False) }}
                    {{ log(existing_tags_for_table, info=False) }}

                    {{ log('--', info=False) }}
                    {% for table_tag in res.node.meta.database_tags.table %}

                        {{ create_tag_if_missing(current_tags_in_schema, table_tag | upper) }}
                        {% set desired_tag_value = res.node.meta.database_tags.table[table_tag] %}

                        {{ set_table_tag_value_if_different(model_schema, model_alias, table_tag, desired_tag_value, existing_tags_for_table) }}
                    {% endfor %}
                    {{ log("========== Finished processing tags for " + model_alias + " ==========", info=False) }}
                {% endif %}
            {% endfor %}
        {% endif %}
    {% endif %}
{% endmacro %}


{% macro create_tag_if_missing(all_tag_names, table_tag) %}
    {% if table_tag not in all_tag_names %}
        {{ log('Creating missing tag ' + table_tag, info=False) }}
        {%- call statement('main', fetch_result=True) -%}
            create tag if not exists silver.{{ table_tag }}
        {%- endcall -%}
        {{ log(load_result('main').data, info=False) }}
    {% else %}
        {{ log('Tag already exists: ' + table_tag, info=False) }}
    {% endif %}
{% endmacro %}


{% macro set_table_tag_value_if_different(model_schema, table_name, tag_name, desired_tag_value, existing_tags) %}
    {{ log('Ensuring tag ' + tag_name + ' has value ' + desired_tag_value + ' at table level', info=False) }}
    {%- set existing_tag_for_table = existing_tags | selectattr('0', 'equalto', 'TABLE') | selectattr('1', 'equalto', tag_name | upper) | list -%}
    {{ log('Filtered tags for table:', info=False) }}
    {{ log(existing_tag_for_table[0], info=False) }}
    {% if existing_tag_for_table | length > 0 and existing_tag_for_table[0][2] == desired_tag_value %}
        {{ log('Correct tag value already exists', info=False) }}
    {% else %}
        {{ log('Setting tag value for ' + tag_name + ' to value ' + desired_tag_value, info=False) }}
        {%- call statement('main', fetch_result=True) -%}
            alter table {{ model_schema }}.{{ table_name }} set tag {{ tag_name }} = '{{ desired_tag_value }}'
        {%- endcall -%}
        {{ log(load_result('main').data, info=False) }}
    {% endif %}
{% endmacro %}


{% macro set_column_tag_value_if_different(table_name, column_name, tag_name, desired_tag_value, existing_tags) %}
    {{ log('Ensuring tag ' + tag_name + ' has value ' + desired_tag_value + ' at column level', info=False) }}
    {%- set existing_tag_for_column = existing_tags | selectattr('0', 'equalto', 'COLUMN') | selectattr('1', 'equalto', tag_name | upper) | list -%}
    {{ log('Filtered tags for column:', info=False) }}
    {{ log(existing_tag_for_column[0], info=False) }}
    {% if existing_tag_for_column | length > 0 and existing_tag_for_column[0][2] == desired_tag_value %}
        {{ log('Correct tag value already exists', info=False) }}
    {% else %}
        {{ log('Setting tag value for ' + tag_name + ' to value ' + desired_tag_value, info=False) }}
        {%- call statement('main', fetch_result=True) -%}
            alter table {{ table_name }} modify column {{ column_name }} set tag {{ tag_name }} = '{{ desired_tag_value }}'
        {%- endcall -%}
        {{ log(load_result('main').data, info=False) }}
    {% endif %}
{% endmacro %}


{% macro set_database_tag_value(tag_name, tag_value) %}
    {% set query %}
        create tag if not exists silver.{{ tag_name }}
    {% endset %}
    {% do run_query(query) %}
    {% set query %}
        alter database {{ target.database }} set tag {{ target.database }}.silver.{{ tag_name }} = '{{ tag_value }}'
    {% endset %}
    {% do run_query(query) %}
{% endmacro %}


{% macro set_schema_tag_value(target_schema, tag_name, tag_value) %}
    {% set query %}
        create tag if not exists silver.{{ tag_name }}
    {% endset %}
    {% do run_query(query) %}
    {% set query %}
        alter schema {{ target.database }}.{{ target_schema }} set tag {{ target.database }}.silver.{{ tag_name }} = '{{ tag_value }}'
    {% endset %}
    {% do run_query(query) %}
{% endmacro %}
```
29 macros/tests/compare_model_subset.sql Normal file
@@ -0,0 +1,29 @@
```sql
{% test compare_model_subset(model, compare_model, compare_columns, model_condition) %}

{% set compare_cols_csv = compare_columns | join(', ') %}

with a as (
    select {{ compare_cols_csv }} from {{ model }}
    {{ model_condition }}
),
b as (
    select {{ compare_cols_csv }} from {{ compare_model }}
),
a_minus_b as (
    select * from a
    except
    select * from b
),
b_minus_a as (
    select * from b
    except
    select * from a
),
unioned as (
    select 'in_actual_not_in_expected' as which_diff, a_minus_b.* from a_minus_b
    union all
    select 'in_expected_not_in_actual' as which_diff, b_minus_a.* from b_minus_a
)
select * from unioned

{% endtest %}
```
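A sketch of attaching this generic test in a model's yml; the model, compare model, columns, and condition here are illustrative, not taken from this commit:

```yml
models:
  - name: core__fact_swaps
    tests:
      - compare_model_subset:
          compare_model: ref('silver__swaps')
          compare_columns: ['block_number', 'tx_id']
          model_condition: "where block_number > 1000000"
```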
37 macros/tests/sequence_gaps.sql Normal file
@@ -0,0 +1,37 @@
```sql
{% macro sequence_gaps(
        table,
        partition_by,
        column
    ) %}
    {%- set partition_sql = partition_by | join(", ") -%}
    {%- set previous_column = "prev_" ~ column -%}
    WITH source AS (
        SELECT
            {{ partition_sql + "," if partition_sql }}
            {{ column }},
            LAG(
                {{ column }},
                1
            ) over (
                {{ "PARTITION BY " ~ partition_sql if partition_sql }}
                ORDER BY
                    {{ column }} ASC
            ) AS {{ previous_column }}
        FROM
            {{ table }}
        WHERE
            block_timestamp::date <= current_date - 1
    )
    SELECT
        {{ partition_sql + "," if partition_sql }}
        {{ previous_column }},
        {{ column }},
        {{ column }} - {{ previous_column }} - 1 AS gap
    FROM
        source
    WHERE
        {{ column }} - {{ previous_column }} <> 1
    ORDER BY
        gap DESC
{% endmacro %}
```
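Note the macro assumes the relation has a `block_timestamp` column, which is hard-coded in the WHERE clause. A sketch of a singular test built on it (the file path is illustrative); any row returned is a gap and fails the test:

```sql
-- tests/silver__blocks_sequence_gaps.sql
{{ sequence_gaps(
    ref('silver__blocks'),
    [],
    'block_number'
) }}
```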
33 macros/tests/tx_gaps.sql Normal file
@@ -0,0 +1,33 @@
```sql
{% macro tx_gaps(
        model
    ) %}
    WITH block_base AS (
        SELECT
            block_number,
            tx_count
        FROM
            {{ ref('silver__blocks') }}
    ),
    model_name AS (
        SELECT
            block_number,
            COUNT(
                DISTINCT tx_id
            ) AS model_tx_count
        FROM
            {{ model }}
        GROUP BY
            block_number
    )
    SELECT
        block_base.block_number,
        tx_count,
        model_name.block_number,
        model_tx_count
    FROM
        block_base
        LEFT JOIN model_name
        ON block_base.block_number = model_name.block_number
    WHERE
        tx_count <> model_tx_count
{% endmacro %}
```
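Usage follows the same singular-test pattern as `sequence_gaps`; the file path and model name below are assumptions for illustration:

```sql
-- tests/silver__transactions_tx_gaps.sql
{{ tx_gaps(ref('silver__transactions')) }}
```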
42 macros/utils.sql Normal file
@@ -0,0 +1,42 @@
```sql
{% macro if_data_call_wait() %}
    {% if var(
        "STREAMLINE_INVOKE_STREAMS"
    ) %}
        {% set query %}
            SELECT 1
            WHERE EXISTS(
                SELECT 1
                FROM {{ model.schema ~ "." ~ model.alias }}
                LIMIT 1
            )
        {% endset %}
        {% if execute %}
            {% set results = run_query(query) %}
            {% if results %}
                {{ log(
                    "Waiting...",
                    info = True
                ) }}

                {% set wait_query %}
                    SELECT system$wait({{ var("WAIT", 600) }})
                {% endset %}
                {% do run_query(wait_query) %}
            {% else %}
                SELECT NULL;
            {% endif %}
        {% endif %}
    {% endif %}
{% endmacro %}
```
16 package-lock.yml Normal file
@@ -0,0 +1,16 @@
```yml
packages:
  - package: calogica/dbt_expectations
    version: 0.8.5
  - package: dbt-labs/dbt_utils
    version: 1.0.0
  - git: https://github.com/FlipsideCrypto/fsc-utils.git
    revision: eb33ac727af26ebc8a8cc9711d4a6ebc3790a107
  - package: get-select/dbt_snowflake_query_tags
    version: 2.5.0
  - package: Snowflake-Labs/dbt_constraints
    version: 0.6.3
  - package: calogica/dbt_date
    version: 0.7.2
  - git: https://github.com/FlipsideCrypto/livequery-models.git
    revision: b024188be4e9c6bc00ed77797ebdc92d351d620e
sha1_hash: d3219b9c206b5988189dcdafae0ec22ca9b4056c
```
11 packages.yml Normal file
@@ -0,0 +1,11 @@
```yml
packages:
  - package: calogica/dbt_expectations
    version: [">=0.4.0", "<0.9.0"]
  - package: dbt-labs/dbt_utils
    version: 1.0.0
  - git: https://github.com/FlipsideCrypto/fsc-utils.git
    revision: v1.32.0
  - package: get-select/dbt_snowflake_query_tags
    version: [">=2.0.0", "<3.0.0"]
  - package: Snowflake-Labs/dbt_constraints
    version: [">=0.4.0"]
```
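Dependencies are installed with `dbt deps`, which resolves the version ranges above and records the pinned results in the `package-lock.yml` shown earlier:

```
dbt deps
```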
29 profiles.yml Normal file
@@ -0,0 +1,29 @@
```yml
movement:
  target: dev
  outputs:
    dev:
      type: snowflake
      account: "{{ env_var('ACCOUNT') }}"
      user: "{{ env_var('USER') }}"
      password: "{{ env_var('PASSWORD') }}"
      role: "{{ env_var('ROLE') }}"
      schema: "{{ env_var('SCHEMA') }}"
      region: "{{ env_var('REGION') }}"
      database: "{{ env_var('DATABASE') }}"
      warehouse: "{{ env_var('WAREHOUSE') }}"
      threads: 8
      client_session_keep_alive: False
    prod:
      type: snowflake
      account: "{{ env_var('ACCOUNT') }}"
      user: "{{ env_var('USER') }}"
      password: "{{ env_var('PASSWORD') }}"
      role: "{{ env_var('ROLE') }}"
      schema: "{{ env_var('SCHEMA') }}"
      region: "{{ env_var('REGION') }}"
      database: "{{ env_var('DATABASE') }}"
      warehouse: "{{ env_var('WAREHOUSE') }}"
      threads: 8
      client_session_keep_alive: False
config:
  send_anonymous_usage_stats: False
```
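Every connection setting is read from the environment, so a local run needs the variables exported first. A sketch with placeholder values (the role, database, and schema shown elsewhere in this commit; everything else is illustrative):

```
export ACCOUNT=ab12345.us-east-1   # placeholder
export USER=dbt_runner             # placeholder
export PASSWORD='********'
export ROLE=INTERNAL_DEV
export REGION=us-east-1
export DATABASE=MOVEMENT_DEV
export WAREHOUSE=DBT               # placeholder
export SCHEMA=silver
dbt debug --target dev
```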
3 requirements.txt Normal file
@@ -0,0 +1,3 @@
```
dbt-core>=1.9,<1.10
dbt-snowflake>=1.9,<1.10
protobuf==4.25.3
```
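A typical local setup, using the `.venv/` directory that the `.gitignore` above already excludes:

```
python -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
```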
0 snapshots/.gitkeep Normal file