mirror of
https://github.com/FlipsideCrypto/movement-models.git
synced 2026-02-06 09:26:45 +00:00
first commit, cloned from m1 with updated resources
This commit is contained in:
parent
5453a0af50
commit
f34f80e87a
75
README.md
Normal file
75
README.md
Normal file
@ -0,0 +1,75 @@
|
||||
|
||||
## Profile Set Up
|
||||
|
||||
#### Use the following within profiles.yml
|
||||
----
|
||||
|
||||
```yml
|
||||
movement:
|
||||
target: dev
|
||||
outputs:
|
||||
dev:
|
||||
type: snowflake
|
||||
account: <ACCOUNT>
|
||||
role: <ROLE>
|
||||
user: <USERNAME>
|
||||
password: <PASSWORD>
|
||||
region: <REGION>
|
||||
database: MOVEMENT_DEV
|
||||
warehouse: <WAREHOUSE>
|
||||
schema: silver
|
||||
threads: 4
|
||||
client_session_keep_alive: False
|
||||
query_tag: <TAG>
|
||||
```
|
||||
|
||||
### Resources:
|
||||
- Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction)
|
||||
- Check out [Discourse](https://discourse.getdbt.com/) for commonly asked questions and answers
|
||||
- Join the [chat](https://community.getdbt.com/) on Slack for live discussions and support
|
||||
- Find [dbt events](https://events.getdbt.com) near you
|
||||
- Check out [the blog](https://blog.getdbt.com/) for the latest news on dbt's development and best practices
|
||||
|
||||
## Applying Model Tags
|
||||
|
||||
### Database / Schema level tags
|
||||
|
||||
Database and schema tags are applied via the `add_database_or_schema_tags` macro. These tags are inherited by their downstream objects. To add/modify tags call the appropriate tag set function within the macro.
|
||||
|
||||
```
|
||||
{{ set_database_tag_value('SOME_DATABASE_TAG_KEY','SOME_DATABASE_TAG_VALUE') }}
|
||||
{{ set_schema_tag_value('SOME_SCHEMA_TAG_KEY','SOME_SCHEMA_TAG_VALUE') }}
|
||||
```
|
||||
|
||||
### Model tags
|
||||
|
||||
To add/update a model's snowflake tags, add/modify the `meta` model property under `config`. Only table level tags are supported at this time via DBT.
|
||||
|
||||
```
|
||||
{{ config(
|
||||
...,
|
||||
meta={
|
||||
'database_tags':{
|
||||
'table': {
|
||||
'PURPOSE': 'SOME_PURPOSE'
|
||||
}
|
||||
}
|
||||
},
|
||||
...
|
||||
) }}
|
||||
```
|
||||
|
||||
By default, model tags are not pushed to snowflake on each load. You can push a tag update for a model by specifying the `UPDATE_SNOWFLAKE_TAGS` project variable during a run.
|
||||
|
||||
```
|
||||
dbt run --var '{"UPDATE_SNOWFLAKE_TAGS":True}' -s models/core/core__fact_swaps.sql
|
||||
```
|
||||
|
||||
### Querying for existing tags on a model in snowflake
|
||||
|
||||
```
|
||||
select *
from table(movement.information_schema.tag_references('movement.core.fact_blocks', 'table'));
|
||||
```
|
||||
0
analyses/.gitkeep
Normal file
0
analyses/.gitkeep
Normal file
0
data/.gitkeep
Normal file
0
data/.gitkeep
Normal file
6
data/github_actions__workflows.csv
Normal file
6
data/github_actions__workflows.csv
Normal file
@ -0,0 +1,6 @@
|
||||
workflow_name,workflow_schedule
|
||||
dbt_run_streamline_blocks_tx_realtime,"0,20,40 * * * *"
|
||||
dbt_run_streamline_transactions_realtime,"15,55 * * * *"
|
||||
dbt_run_incremental_core,"15,35,55 * * * *"
|
||||
dbt_test_intraday,"20 */4 * * *"
|
||||
dbt_test_tasks,"0,30 * * * *"
|
||||
|
103
dbt_project.yml
Normal file
103
dbt_project.yml
Normal file
@ -0,0 +1,103 @@
|
||||
# Name your project! Project names should contain only lowercase characters
|
||||
# and underscores. A good package name should reflect your organization's
|
||||
# name or the intended use of these models
|
||||
name: "movement_models"
|
||||
version: "1.0.0"
|
||||
config-version: 2
|
||||
|
||||
require-dbt-version: ">=1.8.0"
|
||||
|
||||
# This setting configures which "profile" dbt uses for this project.
|
||||
profile: "movement"
|
||||
|
||||
# These configurations specify where dbt should look for different types of files.
|
||||
# The `source-paths` config, for example, states that models in this project can be
|
||||
# found in the "models/" directory. You probably won't need to change these!
|
||||
model-paths: ["models"]
|
||||
analysis-paths: ["analysis"]
|
||||
test-paths: ["tests"]
|
||||
seed-paths: ["data"]
|
||||
macro-paths: ["macros"]
|
||||
snapshot-paths: ["snapshots"]
|
||||
|
||||
target-path: "target" # directory which will store compiled SQL files
|
||||
clean-targets: # directories to be removed by `dbt clean`
|
||||
- "target"
|
||||
- "dbt_modules"
|
||||
- "dbt_packages"
|
||||
|
||||
tests:
|
||||
+store_failures: true # all tests
|
||||
|
||||
on-run-start:
|
||||
- "{{ create_sps() }}"
|
||||
- "{{ create_udfs() }}"
|
||||
|
||||
on-run-end:
|
||||
- '{{ apply_meta_as_tags(results) }}'
|
||||
|
||||
dispatch:
|
||||
- macro_namespace: dbt
|
||||
search_order:
|
||||
- movement_models
|
||||
- dbt_snowflake_query_tags
|
||||
- dbt
|
||||
|
||||
query-comment:
|
||||
comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
|
||||
append: true # Snowflake removes prefixed comments.
|
||||
|
||||
# Configuring models
|
||||
# Full documentation: https://docs.getdbt.com/docs/configuring-models
|
||||
|
||||
models:
|
||||
+copy_grants: true
|
||||
+persist_docs:
|
||||
relation: true
|
||||
columns: true
|
||||
+on_schema_change: "append_new_columns"
|
||||
|
||||
# In this example config, we tell dbt to build all models in the example/ directory
|
||||
# as tables. These settings can be overridden in the individual model files
|
||||
# using the `{{ config(...) }}` macro.
|
||||
|
||||
vars:
|
||||
"dbt_date:time_zone": GMT
|
||||
STREAMLINE_INVOKE_STREAMS: false
|
||||
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: false
|
||||
UPDATE_UDFS_AND_SPS: false
|
||||
UPDATE_SNOWFLAKE_TAGS: True
|
||||
OBSERV_FULL_TEST: false
|
||||
START_GHA_TASKS: false
|
||||
BRONZE_LOOKBACK_DAYS: '{{ env_var("BRONZE_LOOKBACK_DAYS", 3) }}'
|
||||
|
||||
#### STREAMLINE 2.0 BEGIN ####
|
||||
|
||||
API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
|
||||
EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
|
||||
ROLES: '{{ var("config")[target.name]["ROLES"] }}'
|
||||
|
||||
config:
|
||||
# The keys correspond to dbt profiles and are case sensitive
|
||||
dev:
|
||||
API_INTEGRATION: aws_m1_api_dev
|
||||
EXTERNAL_FUNCTION_URI: https://z2wyjkp9r7.execute-api.us-east-1.amazonaws.com/stg/
|
||||
ROLES:
|
||||
- AWS_LAMBDA_MOVEMENT_API
|
||||
- INTERNAL_DEV
|
||||
dev-2xl:
|
||||
API_INTEGRATION: aws_m1_api_dev
|
||||
EXTERNAL_FUNCTION_URI: https://z2wyjkp9r7.execute-api.us-east-1.amazonaws.com/stg/
|
||||
ROLES:
|
||||
- AWS_LAMBDA_MOVEMENT_API
|
||||
- INTERNAL_DEV
|
||||
|
||||
prod:
|
||||
API_INTEGRATION: aws_movement_api_prod
|
||||
EXTERNAL_FUNCTION_URI: https://d0t060jjxf.execute-api.us-east-1.amazonaws.com/prod/
|
||||
ROLES:
|
||||
- AWS_LAMBDA_MOVEMENT_API
|
||||
- INTERNAL_DEV
|
||||
- DBT_CLOUD_MOVEMENT
|
||||
|
||||
#### STREAMLINE 2.0 END ####
|
||||
0
docs/.gitkeep
Normal file
0
docs/.gitkeep
Normal file
0
macros/.gitkeep
Normal file
0
macros/.gitkeep
Normal file
6
macros/create_sps.sql
Normal file
6
macros/create_sps.sql
Normal file
@ -0,0 +1,6 @@
|
||||
{% macro create_sps() %}
{#- on-run-start hook: deploys shared stored procedures.
    Only runs against the prod database (MOVEMENT); dev targets rely on the
    procedures already deployed there. Renders to the DDL that creates the
    _internal schema and the create_prod_clone procedure inside it. -#}
{% if target.database == 'MOVEMENT' %}
CREATE schema IF NOT EXISTS _internal;
{{ sp_create_prod_clone('_internal') }};
{% endif %}
{% endmacro %}
|
||||
10
macros/create_udfs.sql
Normal file
10
macros/create_udfs.sql
Normal file
@ -0,0 +1,10 @@
|
||||
{% macro create_udfs() %}
{#- on-run-start hook: (re)creates the external functions used by streamline.
    Gated behind the UPDATE_UDFS_AND_SPS var so normal runs skip the DDL. -#}
{% if var("UPDATE_UDFS_AND_SPS") %}
{% set sql %}
CREATE schema if NOT EXISTS silver;
{{ create_udf_bulk_rest_api_v2() }}

{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}
|
||||
23
macros/custom_naming_macros.sql
Normal file
23
macros/custom_naming_macros.sql
Normal file
@ -0,0 +1,23 @@
|
||||
{% macro generate_schema_name(
custom_schema_name = none,
node = none
) -%}
{#- dbt built-in override: derives the schema from the model file name.
    Convention is <schema>__<table>.sql — the part before '__' becomes the
    schema (e.g. core__fact_blocks lands in schema "core").
    custom_schema_name is accepted for signature compatibility but ignored. -#}
{% set node_name = node.name %}
{% set split_name = node_name.split('__') %}
{{ split_name [0] | trim }}
{%- endmacro %}
|
||||
|
||||
{% macro generate_alias_name(
custom_alias_name = none,
node = none
) -%}
{#- dbt built-in override: derives the relation name from the model file name.
    Convention is <schema>__<table>.sql — the part after '__' becomes the
    table/view name (e.g. core__fact_blocks -> fact_blocks).
    custom_alias_name is accepted for signature compatibility but ignored. -#}
{% set node_name = node.name %}
{% set split_name = node_name.split('__') %}
{{ split_name [1] | trim }}
{%- endmacro %}
|
||||
|
||||
{% macro generate_tmp_view_name(model_name) -%}
{#- Builds the fully-qualified name of a model's temporary relation:
    <target db>.<schema part>.<table part>__dbt_tmp, using the same
    '__' file-name convention as generate_schema_name/generate_alias_name.
    NOTE(review): despite the name, the parameter is used as a node object
    (model_name.name), and `| trim` binds only to the '__dbt_tmp' literal,
    not the whole concatenation — presumably harmless; confirm. -#}
{% set node_name = model_name.name %}
{% set split_name = node_name.split('__') %}
{{ target.database ~ '.' ~ split_name[0] ~ '.' ~ split_name [1] ~ '__dbt_tmp' | trim }}
{%- endmacro %}
|
||||
11
macros/custom_query_tag.sql
Normal file
11
macros/custom_query_tag.sql
Normal file
@ -0,0 +1,11 @@
|
||||
{% macro set_query_tag() -%}
{#- dbt override: sets the Snowflake session query_tag to a JSON blob
    (repo, object, profile, env, plus whatever tag was already set) and
    returns the original tag so dbt can restore it after materialization.
    Returns none when the new tag renders empty.
    NOTE(review): new_query_tag is spliced into the ALTER SESSION string
    unescaped — a single quote inside any field would break the statement;
    confirm inputs cannot contain quotes. -#}
{% set new_json = {"repo":project_name, "object":this.table, "profile":target.profile_name, "env":target.name, "existing_tag":get_current_query_tag() } %}
{% set new_query_tag = tojson(new_json) | as_text %}
{% if new_query_tag %}
{% set original_query_tag = get_current_query_tag() %}
{{ log("Setting query_tag to '" ~ new_query_tag ~ "'. Will reset to '" ~ original_query_tag ~ "' after materialization.") }}
{% do run_query("alter session set query_tag = '{}'".format(new_query_tag)) %}
{{ return(original_query_tag)}}
{% endif %}
{{ return(none)}}
{% endmacro %}
|
||||
16
macros/dbt/get_merge_sql.sql
Normal file
16
macros/dbt/get_merge_sql.sql
Normal file
@ -0,0 +1,16 @@
|
||||
{% macro get_merge_sql(
target,
source,
unique_key,
dest_columns,
incremental_predicates
) -%}
{#- dbt built-in override: delegates incremental MERGE generation to
    fsc_utils.get_merge_sql, passing all arguments through unchanged and
    returning the generated statement. -#}
{% set merge_sql = fsc_utils.get_merge_sql(
target,
source,
unique_key,
dest_columns,
incremental_predicates
) %}
{{ return(merge_sql) }}
{% endmacro %}
|
||||
8
macros/dbt/get_tmp_relation_type.sql
Normal file
8
macros/dbt/get_tmp_relation_type.sql
Normal file
@ -0,0 +1,8 @@
|
||||
{% macro dbt_snowflake_get_tmp_relation_type(
strategy,
unique_key,
language
) %}
{#- dbt-snowflake override: forces the temporary relation used during
    incremental merges to always be a table, regardless of strategy,
    unique_key, or model language. -#}
-- always table
{{ return('table') }}
{% endmacro %}
|
||||
10
macros/run_sp_create_prod_clone.sql
Normal file
10
macros/run_sp_create_prod_clone.sql
Normal file
@ -0,0 +1,10 @@
|
||||
{% macro run_sp_create_prod_clone() %}
{#- Invokes the create_prod_clone stored procedure to clone the movement
    prod database into movement_dev, granting ownership to internal_dev.
    The procedure is deployed by create_sps() into the _internal schema of
    the MOVEMENT database (it only runs when target.database == 'MOVEMENT').

    Fix: the call previously targeted m1._internal — a leftover from the m1
    repo this project was cloned from; the procedure lives in movement. -#}
{% set clone_query %}
call movement._internal.create_prod_clone(
'movement',
'movement_dev',
'internal_dev'
);
{% endset %}
{% do run_query(clone_query) %}
{% endmacro %}
|
||||
44
macros/sp_create_prod_clone.sql
Normal file
44
macros/sp_create_prod_clone.sql
Normal file
@ -0,0 +1,44 @@
|
||||
{% macro sp_create_prod_clone(target_schema) -%}
{#- Emits DDL for a JavaScript stored procedure
    <target_schema>.create_prod_clone(source_db, destination_db, role) that
    clones a database and re-grants ownership of all current and future
    objects (schemas, functions, procedures, views, stages, tables, tags and
    the database itself) in the clone to the given role.
    Arguments are referenced uppercase inside the JS body per Snowflake's
    JavaScript argument binding.
    NOTE(review): CREATE/DROP/GRANT are DDL and auto-commit in Snowflake, so
    the BEGIN/ROLLBACK wrapper presumably does not make the clone atomic —
    confirm before relying on rollback semantics. -#}

create or replace procedure {{ target_schema }}.create_prod_clone(source_db_name string, destination_db_name string, role_name string)
returns boolean
language javascript
execute as caller
as
$$
snowflake.execute({sqlText: `BEGIN TRANSACTION;`});
try {
snowflake.execute({sqlText: `CREATE OR REPLACE DATABASE ${DESTINATION_DB_NAME} CLONE ${SOURCE_DB_NAME}`});
snowflake.execute({sqlText: `DROP SCHEMA IF EXISTS ${DESTINATION_DB_NAME}._INTERNAL`}); /* this only needs to be in prod */

snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL SCHEMAS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON ALL TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE FUNCTIONS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE PROCEDURES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE VIEWS IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE STAGES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});
snowflake.execute({sqlText: `GRANT OWNERSHIP ON FUTURE TABLES IN DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME};`});

snowflake.execute({sqlText: `GRANT OWNERSHIP ON DATABASE ${DESTINATION_DB_NAME} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`})

{#- tags are not covered by the bulk grants above; enumerate and grant each -#}
var existing_tags = snowflake.execute({sqlText: `SHOW TAGS IN DATABASE ${DESTINATION_DB_NAME};`});
while (existing_tags.next()) {
var schema = existing_tags.getColumnValue(4);
var tag_name = existing_tags.getColumnValue(2)
snowflake.execute({sqlText: `GRANT OWNERSHIP ON TAG ${DESTINATION_DB_NAME}.${schema}.${tag_name} TO ROLE ${ROLE_NAME} COPY CURRENT GRANTS;`});
}

snowflake.execute({sqlText: `COMMIT;`});
} catch (err) {
snowflake.execute({sqlText: `ROLLBACK;`});
throw(err);
}

return true
$$

{%- endmacro %}
|
||||
21
macros/streamline/api_integrations.sql
Normal file
21
macros/streamline/api_integrations.sql
Normal file
@ -0,0 +1,21 @@
|
||||
{% macro create_aws_movement_api() %}
{#- Creates the AWS API Gateway integration used by the streamline external
    functions: aws_movement_api_prod for the prod target, aws_m1_api_dev
    (the staging gateway inherited from the m1 project) for everything else.
    IF NOT EXISTS makes repeated runs idempotent. -#}
{{ log(
"Creating integration for target:" ~ target
) }}

{% if target.name == "prod" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_movement_api_prod api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::924682671219:role/movement-api-prod-rolesnowflakeudfsAF733095-uYaV9o2uJLDU' api_allowed_prefixes = (
'https://d0t060jjxf.execute-api.us-east-1.amazonaws.com/prod/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% else %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_m1_api_dev api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::704693948482:role/m1-api-stg-rolesnowflakeudfsAF733095-bEigMvFi81Fd' api_allowed_prefixes = (
'https://z2wyjkp9r7.execute-api.us-east-1.amazonaws.com/stg/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}
|
||||
70
macros/streamline/models.sql
Normal file
70
macros/streamline/models.sql
Normal file
@ -0,0 +1,70 @@
|
||||
{% macro streamline_external_table_query_v2(
model,
partition_function
) %}
{#- Incremental bronze query over a streamline external table: restricts the
    scan to files registered in the last BRONZE_LOOKBACK_DAYS days, joining
    the registration history back to the external table on file name and the
    caller-supplied partition_function (a SQL expression over file_name).
    Rows whose DATA:error is populated are excluded.
    NOTE(review): the WHERE clause repeats the partition_key join predicate
    verbatim — redundant with the ON clause, though harmless. -#}

{% set days = var("BRONZE_LOOKBACK_DAYS")%}

WITH meta AS (
SELECT
last_modified AS inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -ABS({{days}}), CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", model) }}')
) A
)
SELECT
s.*,
b.file_name,
inserted_timestamp
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
{% endmacro %}
|
||||
|
||||
{% macro streamline_external_table_FR_query_v2(
model,
partition_function
) %}
{#- Full-refresh variant of streamline_external_table_query_v2: scans every
    registered file (information_schema.external_table_files) instead of the
    recent registration history, so there is no lookback window. Uses each
    file's registered_on as inserted_timestamp. Error rows are excluded. -#}
WITH meta AS (
SELECT
registered_on AS inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", model) }}'
)
) A
)
SELECT
s.*,
b.file_name,
inserted_timestamp
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
{% endmacro %}
|
||||
10
macros/streamline/streamline_udfs.sql
Normal file
10
macros/streamline/streamline_udfs.sql
Normal file
@ -0,0 +1,10 @@
|
||||
{% macro create_udf_bulk_rest_api_v2() %}
{#- DDL for the streamline.udf_bulk_rest_api_v2 external function, bound to
    the prod or dev API integration/endpoint depending on target.name.
    Rendered into the run_query batch built by create_udfs(). -#}
CREATE
OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_rest_api_v2(
json OBJECT
) returns ARRAY api_integration = {% if target.name == "prod" %}
aws_movement_api_prod AS 'https://d0t060jjxf.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_rest_api'
{% else %}
aws_m1_api_dev AS 'https://z2wyjkp9r7.execute-api.us-east-1.amazonaws.com/stg/udf_bulk_rest_api'
{%- endif %};
{% endmacro %}
|
||||
6
macros/tags/add_database_or_schema_tags.sql
Normal file
6
macros/tags/add_database_or_schema_tags.sql
Normal file
@ -0,0 +1,6 @@
|
||||
{% macro add_database_or_schema_tags() %}
{#- Central place to declare database/schema level Snowflake tags, which
    downstream objects inherit. Currently sets BLOCKCHAIN_NAME = 'movement'
    at the database level; add further set_database_tag_value /
    set_schema_tag_value calls here as needed. -#}
{{ set_database_tag_value(
'BLOCKCHAIN_NAME',
'movement'
) }}
{% endmacro %}
|
||||
127
macros/tags/snowflake_tagging.sql
Normal file
127
macros/tags/snowflake_tagging.sql
Normal file
@ -0,0 +1,127 @@
|
||||
{% macro apply_meta_as_tags(results) %}
{#- on-run-end hook: pushes each model's meta.database_tags.table entries to
    Snowflake as table-level tags. Gated by the UPDATE_SNOWFLAKE_TAGS var.
    `results` is dbt's run-results list; only nodes declaring
    meta.database_tags are processed. -#}
{% if var("UPDATE_SNOWFLAKE_TAGS") %}
{{ log('apply_meta_as_tags', info=False) }}
{{ log(results, info=False) }}
{% if execute %}

{#- cache of existing tag names per db.schema so SHOW TAGS runs once per schema -#}
{%- set tags_by_schema = {} -%}
{% for res in results -%}
{% if res.node.meta.database_tags %}

{%- set model_database = res.node.database -%}
{%- set model_schema = res.node.schema -%}
{%- set model_schema_full = model_database+'.'+model_schema -%}
{%- set model_alias = res.node.alias -%}

{% if model_schema_full not in tags_by_schema.keys() %}
{{ log('need to fetch tags for schema '+model_schema_full, info=False) }}
{%- call statement('main', fetch_result=True) -%}
show tags in {{model_database}}.{{model_schema}}
{%- endcall -%}
{%- set _ = tags_by_schema.update({model_schema_full: load_result('main')['table'].columns.get('name').values()|list}) -%}
{{ log('Added tags to cache', info=False) }}
{% else %}
{{ log('already have tag info for schema', info=False) }}
{% endif %}

{%- set current_tags_in_schema = tags_by_schema[model_schema_full] -%}
{{ log('current_tags_in_schema:', info=False) }}
{{ log(current_tags_in_schema, info=False) }}
{{ log("========== Processing tags for "+model_schema_full+"."+model_alias+" ==========", info=False) }}

{% set line -%}
node: {{ res.node.unique_id }}; status: {{ res.status }} (message: {{ res.message }})
node full: {{ res.node}}
meta: {{ res.node.meta}}
materialized: {{ res.node.config.materialized }}
{%- endset %}
{{ log(line, info=False) }}

{#- fetch tags currently attached to this table (and its columns) so we only
    alter what actually changed -#}
{%- call statement('main', fetch_result=True) -%}
select LEVEL,UPPER(TAG_NAME) as TAG_NAME,TAG_VALUE from table(information_schema.tag_references_all_columns('{{model_schema}}.{{model_alias}}', 'table'))
{%- endcall -%}
{%- set existing_tags_for_table = load_result('main')['data'] -%}
{{ log('Existing tags for table:', info=False) }}
{{ log(existing_tags_for_table, info=False) }}

{{ log('--', info=False) }}
{% for table_tag in res.node.meta.database_tags.table %}

{#- create the tag object if the schema lacks it, then set the value on the
    table only when it differs from what's already applied -#}
{{ create_tag_if_missing(current_tags_in_schema,table_tag|upper) }}
{% set desired_tag_value = res.node.meta.database_tags.table[table_tag] %}

{{set_table_tag_value_if_different(model_schema,model_alias,table_tag,desired_tag_value,existing_tags_for_table)}}
{% endfor %}
{{ log("========== Finished processing tags for "+model_alias+" ==========", info=False) }}
{% endif %}
{% endfor %}
{% endif %}
{% endif %}
{% endmacro %}
|
||||
|
||||
|
||||
{% macro create_tag_if_missing(all_tag_names,table_tag) %}
{#- Creates the Snowflake tag object in the silver schema when table_tag is
    not in all_tag_names (the cached list from SHOW TAGS); otherwise just
    logs. All tag objects for this project live in silver. -#}
{% if table_tag not in all_tag_names %}
{{ log('Creating missing tag '+table_tag, info=False) }}
{%- call statement('main', fetch_result=True) -%}
create tag if not exists silver.{{table_tag}}
{%- endcall -%}
{{ log(load_result('main').data, info=False) }}
{% else %}
{{ log('Tag already exists: '+table_tag, info=False) }}
{% endif %}
{% endmacro %}
|
||||
|
||||
{% macro set_table_tag_value_if_different(model_schema,table_name,tag_name,desired_tag_value,existing_tags) %}
{#- Applies tag_name = desired_tag_value at table level, skipping the ALTER
    when existing_tags (rows of LEVEL, TAG_NAME, TAG_VALUE) already holds
    the desired value for this tag at TABLE level.
    NOTE(review): existing_tag_for_table[0] is logged before the length
    check — presumably relies on Jinja's undefined for an empty list;
    confirm this cannot raise under strict-undefined settings. -#}
{{ log('Ensuring tag '+tag_name+' has value '+desired_tag_value+' at table level', info=False) }}
{%- set existing_tag_for_table = existing_tags|selectattr('0','equalto','TABLE')|selectattr('1','equalto',tag_name|upper)|list -%}
{{ log('Filtered tags for table:', info=False) }}
{{ log(existing_tag_for_table[0], info=False) }}
{% if existing_tag_for_table|length > 0 and existing_tag_for_table[0][2]==desired_tag_value %}
{{ log('Correct tag value already exists', info=False) }}
{% else %}
{{ log('Setting tag value for '+tag_name+' to value '+desired_tag_value, info=False) }}
{%- call statement('main', fetch_result=True) -%}
alter table {{model_schema}}.{{table_name}} set tag {{tag_name}} = '{{desired_tag_value}}'
{%- endcall -%}
{{ log(load_result('main').data, info=False) }}
{% endif %}
{% endmacro %}
|
||||
|
||||
{% macro set_column_tag_value_if_different(table_name,column_name,tag_name,desired_tag_value,existing_tags) %}
{#- Column-level twin of set_table_tag_value_if_different: applies
    tag_name = desired_tag_value on a single column, skipping the ALTER when
    existing_tags already shows the value at COLUMN level.
    NOTE(review): same caveat — existing_tag_for_column[0] is logged before
    the length check. -#}
{{ log('Ensuring tag '+tag_name+' has value '+desired_tag_value+' at column level', info=False) }}
{%- set existing_tag_for_column = existing_tags|selectattr('0','equalto','COLUMN')|selectattr('1','equalto',tag_name|upper)|list -%}
{{ log('Filtered tags for column:', info=False) }}
{{ log(existing_tag_for_column[0], info=False) }}
{% if existing_tag_for_column|length > 0 and existing_tag_for_column[0][2]==desired_tag_value %}
{{ log('Correct tag value already exists', info=False) }}
{% else %}
{{ log('Setting tag value for '+tag_name+' to value '+desired_tag_value, info=False) }}
{%- call statement('main', fetch_result=True) -%}
alter table {{table_name}} modify column {{column_name}} set tag {{tag_name}} = '{{desired_tag_value}}'
{%- endcall -%}
{{ log(load_result('main').data, info=False) }}
{% endif %}
{% endmacro %}
|
||||
|
||||
{% macro set_database_tag_value(tag_name,tag_value) %}
{#- Creates the tag object in silver (idempotent) and applies it to the
    target database. Database-level tags are inherited by contained objects. -#}
{% set query %}
create tag if not exists silver.{{tag_name}}
{% endset %}
{% do run_query(query) %}
{% set query %}
alter database {{target.database}} set tag {{target.database}}.silver.{{tag_name}} = '{{tag_value}}'
{% endset %}
{% do run_query(query) %}
{% endmacro %}
|
||||
|
||||
{% macro set_schema_tag_value(target_schema,tag_name,tag_value) %}
{#- Creates the tag object in silver (idempotent) and applies it to
    <target db>.<target_schema>. Schema-level tags are inherited by the
    schema's objects. Note the three-argument signature: callers must pass
    the schema name first, then tag key and value. -#}
{% set query %}
create tag if not exists silver.{{tag_name}}
{% endset %}
{% do run_query(query) %}
{% set query %}
alter schema {{target.database}}.{{target_schema}} set tag {{target.database}}.silver.{{tag_name}} = '{{tag_value}}'
{% endset %}
{% do run_query(query) %}
{% endmacro %}
|
||||
29
macros/tests/compare_model_subset.sql
Normal file
29
macros/tests/compare_model_subset.sql
Normal file
@ -0,0 +1,29 @@
|
||||
{% test compare_model_subset(model, compare_model, compare_columns, model_condition) %}
{#- Generic data test: symmetric difference between a filtered subset of
    `model` and `compare_model`, projected to compare_columns (a list).
    model_condition is raw SQL appended after the FROM (e.g. a WHERE clause)
    to pick the subset. Passes when zero rows return; each returned row is
    labelled with the side it was found on. -#}

{% set compare_cols_csv = compare_columns | join(', ') %}

with a as (
select {{compare_cols_csv}} from {{ model }}
{{ model_condition }}
),
b as (
select {{compare_cols_csv}} from {{ compare_model }}
),
a_minus_b as (
select * from a
except
select * from b
),
b_minus_a as (
select * from b
except
select * from a
),
unioned as (
select 'in_actual_not_in_expected' as which_diff, a_minus_b.* from a_minus_b
union all
select 'in_expected_not_in_actual' as which_diff, b_minus_a.* from b_minus_a
)
select * from unioned

{% endtest %}
|
||||
37
macros/tests/sequence_gaps.sql
Normal file
37
macros/tests/sequence_gaps.sql
Normal file
@ -0,0 +1,37 @@
|
||||
{% macro sequence_gaps(
table,
partition_by,
column
) %}
{#- Data test helper: finds gaps in a monotonically increasing `column`
    within `table`, optionally per partition (partition_by is a list; empty
    list disables partitioning). Uses LAG to compare consecutive values and
    returns rows where the step is not exactly 1, largest gap first.
    NOTE(review): the WHERE hard-codes block_timestamp::date <= current_date - 1,
    so `table` must expose a block_timestamp column and today's rows are
    deliberately excluded (still-loading data). -#}
{%- set partition_sql = partition_by | join(", ") -%}
{%- set previous_column = "prev_" ~ column -%}
WITH source AS (
SELECT
{{ partition_sql + "," if partition_sql }}
{{ column }},
LAG(
{{ column }},
1
) over (
{{ "PARTITION BY " ~ partition_sql if partition_sql }}
ORDER BY
{{ column }} ASC
) AS {{ previous_column }}
FROM
{{ table }}
WHERE
block_timestamp::date <= current_date - 1
)
SELECT
{{ partition_sql + "," if partition_sql }}
{{ previous_column }},
{{ column }},
{{ column }} - {{ previous_column }}
- 1 AS gap
FROM
source
WHERE
{{ column }} - {{ previous_column }} <> 1
ORDER BY
gap DESC
{% endmacro %}
|
||||
33
macros/tests/tx_gaps.sql
Normal file
33
macros/tests/tx_gaps.sql
Normal file
@ -0,0 +1,33 @@
|
||||
{% macro tx_gaps(
model
) %}
{#- Data test: compares the per-block tx_count recorded on silver__blocks
    against the number of distinct tx_ids actually present in `model`.
    Every returned row is a discrepancy; the test passes on zero rows.

    Fix: the original predicate `tx_count <> model_tx_count` evaluates to
    NULL — and therefore filters the row out — whenever a block is entirely
    absent from `model`, because the LEFT JOIN leaves model_tx_count NULL.
    Fully-missing blocks are exactly the worst gaps, so compare against
    COALESCE(model_tx_count, 0) to report them. -#}
WITH block_base AS (
SELECT
block_id,
tx_count
FROM
{{ ref('silver__blocks') }}
),
model_name AS (
SELECT
block_id,
COUNT(
DISTINCT tx_id
) AS model_tx_count
FROM
{{ model }}
GROUP BY
block_id
)
SELECT
block_base.block_id,
tx_count,
model_name.block_id,
model_tx_count
FROM
block_base
LEFT JOIN model_name
ON block_base.block_id = model_name.block_id
WHERE
tx_count <> COALESCE(model_tx_count, 0)
{% endmacro %}
|
||||
42
macros/utils.sql
Normal file
42
macros/utils.sql
Normal file
@ -0,0 +1,42 @@
|
||||
{% macro if_data_call_wait() %}
{#- Streamline helper: when STREAMLINE_INVOKE_STREAMS is set and the model's
    target table already contains rows, pause via system$wait for WAIT
    seconds (default 600) to let an in-flight external call land data before
    the model reads it.
    NOTE(review): `{% if results %}` tests the run_query return object —
    confirm it is falsy when the EXISTS query returns zero rows, otherwise
    the wait always fires. -#}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% set query %}
SELECT
1
WHERE
EXISTS(
SELECT
1
FROM
{{ model.schema ~ "." ~ model.alias }}
LIMIT
1
) {% endset %}
{% if execute %}
{% set results = run_query(
query
) %}
{% if results %}
{{ log(
"Waiting...",
info = True
) }}

{% set wait_query %}
SELECT
system$wait(
{{ var(
"WAIT",
600
) }}
) {% endset %}
{% do run_query(wait_query) %}
{% else %}
SELECT
NULL;
{% endif %}
{% endif %}
{% endif %}
{% endmacro %}
|
||||
7
models/bronze/core/bronze__blocks_tx.sql
Normal file
7
models/bronze/core/bronze__blocks_tx.sql
Normal file
@ -0,0 +1,7 @@
|
||||
{{ config (
    materialized = 'view'
) }}
{#- Bronze incremental view over the streamline external table "blocks_tx",
    limited by the macro to files registered within BRONZE_LOOKBACK_DAYS.
    partition_key = numeric prefix (before '_') of the 4th '/'-separated
    path segment of file_name. -#}
{{ streamline_external_table_query_v2(
    model = "blocks_tx",
    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}
|
||||
7
models/bronze/core/bronze__blocks_tx_FR.sql
Normal file
7
models/bronze/core/bronze__blocks_tx_FR.sql
Normal file
@ -0,0 +1,7 @@
|
||||
{{ config (
    materialized = 'view'
) }}
{#- Full-refresh bronze view over the streamline external table "blocks_tx":
    scans all registered files (no lookback window). Same partition_key
    derivation as the incremental variant. -#}
{{ streamline_external_table_FR_query_v2(
    model = "blocks_tx",
    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}
|
||||
7
models/bronze/core/bronze__transactions.sql
Normal file
7
models/bronze/core/bronze__transactions.sql
Normal file
@ -0,0 +1,7 @@
|
||||
{{ config (
    materialized = 'view'
) }}
{#- Bronze incremental view over the streamline external table
    "transactions", limited by the macro to files registered within
    BRONZE_LOOKBACK_DAYS. partition_key = numeric prefix of the 4th path
    segment of file_name. -#}
{{ streamline_external_table_query_v2(
    model = "transactions",
    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}
|
||||
7
models/bronze/core/bronze__transactions_FR.sql
Normal file
7
models/bronze/core/bronze__transactions_FR.sql
Normal file
@ -0,0 +1,7 @@
|
||||
{{ config (
    materialized = 'view'
) }}
{#- Full-refresh bronze view over the streamline external table
    "transactions": scans all registered files (no lookback window). Same
    partition_key derivation as the incremental variant. -#}
{{ streamline_external_table_FR_query_v2(
    model = "transactions",
    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}
|
||||
29
models/bronze/prices/bronze__complete_native_prices.sql
Normal file
29
models/bronze/prices/bronze__complete_native_prices.sql
Normal file
@ -0,0 +1,29 @@
|
||||
{{ config (
    materialized = 'view'
) }}
{#- Bronze passthrough view over crosschain_silver.complete_native_prices,
    exposing hourly native-token prices.
    NOTE(review): the filter selects blockchain 'sei' / symbol 'SEI' in a
    movement repo — looks like a leftover from the project this repo was
    cloned from; confirm the intended blockchain and symbol. -#}

SELECT
HOUR,
asset_id,
symbol,
NAME,
decimals,
price,
blockchain,
is_imputed,
is_deprecated,
provider,
source,
_inserted_timestamp,
inserted_timestamp,
modified_timestamp,
complete_native_prices_id,
_invocation_id
FROM
{{ source(
'crosschain_silver',
'complete_native_prices'
) }}
WHERE
blockchain = 'sei'
AND symbol = 'SEI'
||||
70
models/descriptions/__overview__.md
Normal file
70
models/descriptions/__overview__.md
Normal file
@ -0,0 +1,70 @@
|
||||
{% docs __overview__ %}
|
||||
|
||||
# Welcome to the Flipside Crypto m1 Models Documentation
|
||||
|
||||
## **What does this documentation cover?**
|
||||
The documentation included here details the design of the m1
|
||||
tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/flipsideCrypto/m1-models/)
|
||||
|
||||
## **How do I use these docs?**
|
||||
The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information.
|
||||
|
||||
If you are experienced with dbt docs, feel free to use the sidebar to navigate the documentation, as well as explore the relationships between tables and the logic building them.
|
||||
|
||||
There is more information on how to use dbt docs in the last section of this document.
|
||||
|
||||
## **Quick Links to Table Documentation**
|
||||
|
||||
**Click on the links below to jump to the documentation for each schema.**
|
||||
|
||||
### Core Fact Tables (`m1`.`CORE`.`<table_name>`)
|
||||
- [fact_blocks](#!/model/model.movement_models.core__fact_blocks)
|
||||
- [fact_events](#!/model/model.movement_models.core__fact_events)
|
||||
- [fact_changes](#!/model/model.movement_models.core__fact_changes)
|
||||
- [fact_transactions](#!/model/model.movement_models.core__fact_transactions)
|
||||
- [fact_transactions_block_metadata](#!/model/model.movement_models.core__fact_transactions_block_metadata)
|
||||
- [fact_transactions_state_checkpoint](#!/model/model.movement_models.core__fact_transactions_state_checkpoint)
|
||||
|
||||
|
||||
|
||||
|
||||
## **Data Model Overview**
|
||||
|
||||
The m1 models are built a few different ways, but the core fact tables are built using three layers of sql models: **bronze, silver, and gold (or core).**
|
||||
|
||||
- Bronze: Data is loaded in from the source as a view
|
||||
- Silver: All necessary parsing, filtering, de-duping, and other transformations are done here
|
||||
- Gold (or core): Final views and tables that are available publicly
|
||||
|
||||
The dimension tables are sourced from a variety of on-chain and off-chain sources.
|
||||
|
||||
Convenience views (denoted ez_) are a combination of different fact and dimension tables. These views are built to make it easier to query the data.
|
||||
|
||||
## **Using dbt docs**
|
||||
### Navigation
|
||||
|
||||
You can use the ```Project``` and ```Database``` navigation tabs on the left side of the window to explore the models in the project.
|
||||
|
||||
### Database Tab
|
||||
|
||||
This view shows relations (tables and views) grouped into database schemas. Note that ephemeral models are *not* shown in this interface, as they do not exist in the database.
|
||||
|
||||
### Graph Exploration
|
||||
|
||||
You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.
|
||||
|
||||
On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the Expand button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, or are built from, the model you're exploring.
|
||||
|
||||
Once expanded, you'll be able to use the ```--models``` and ```--exclude``` model selection syntax to filter the models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).
|
||||
|
||||
Note that you can also right-click on models to interactively filter and explore the graph.
|
||||
|
||||
|
||||
### **More information**
|
||||
- [Flipside](https://flipsidecrypto.xyz/)
|
||||
- [Velocity](https://app.flipsidecrypto.com/velocity?nav=Discover)
|
||||
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
|
||||
- [Github](https://github.com/FlipsideCrypto/m1-models)
|
||||
- [What is dbt?](https://docs.getdbt.com/docs/introduction)
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/accumulator_root_hash.md
Normal file
5
models/descriptions/accumulator_root_hash.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs accumulator_root_hash %}
|
||||
|
||||
The root hash of a Merkle accumulator.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/address.md
Normal file
5
models/descriptions/address.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs address %}
|
||||
|
||||
Address unique to an individual wallet, validator, or token.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/address_change.md
Normal file
5
models/descriptions/address_change.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs address_change %}
|
||||
|
||||
The top level address for this change.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/address_event.md
Normal file
5
models/descriptions/address_event.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs address_event %}
|
||||
|
||||
The top level address for this event.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/amount.md
Normal file
5
models/descriptions/amount.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs amount %}
|
||||
|
||||
The non-decimal adjusted amount of a token. For example, if a token has 18 decimals, then the amount of 1 token is 10^18.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/block_hash.md
Normal file
5
models/descriptions/block_hash.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs block_hash %}
|
||||
|
||||
The hash of the block header for a given block.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/block_number.md
Normal file
5
models/descriptions/block_number.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs block_number %}
|
||||
|
||||
Also known as block height. The block number, which indicates the length of the blockchain, increases after the addition of each new block.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/block_timestamp.md
Normal file
5
models/descriptions/block_timestamp.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs block_timestamp %}
|
||||
|
||||
The date and time at which the block was produced.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/blockchain.md
Normal file
5
models/descriptions/blockchain.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs blockchain %}
|
||||
|
||||
The name of the blockchain
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/change_address.md
Normal file
5
models/descriptions/change_address.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs change_address %}
|
||||
|
||||
The first segment of the inner change type
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/change_data.md
Normal file
5
models/descriptions/change_data.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs change_data %}
|
||||
|
||||
The "data" object within this change.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/change_index.md
Normal file
5
models/descriptions/change_index.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs change_index %}
|
||||
|
||||
Unique identifier for the change. This is a monotonically increasing integer that is incremented for each change. This is useful for determining the order of changes.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/change_module.md
Normal file
5
models/descriptions/change_module.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs change_module %}
|
||||
|
||||
The second segment of the inner change type
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/change_resource.md
Normal file
5
models/descriptions/change_resource.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs change_resource %}
|
||||
|
||||
The third segment of the inner change type
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/change_type.md
Normal file
5
models/descriptions/change_type.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs change_type %}
|
||||
|
||||
The "type" object from within this change. Values are: delete_resource, delete_table_item, write_module, write_resource, write_table_item.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/changes.md
Normal file
5
models/descriptions/changes.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs changes %}
|
||||
|
||||
The changes that the transaction executed.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/creation_number.md
Normal file
5
models/descriptions/creation_number.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs creation_number %}
|
||||
|
||||
Ceation number corresponding to the event stream originating from the given account.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/creator.md
Normal file
5
models/descriptions/creator.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs creator %}
|
||||
|
||||
Name of the label creator - for now, this will always be "Flipside."
|
||||
|
||||
{% enddocs %}
|
||||
55
models/descriptions/defi__fact_bridge_activity.md
Normal file
55
models/descriptions/defi__fact_bridge_activity.md
Normal file
@ -0,0 +1,55 @@
|
||||
|
||||
{% docs bridge_platform %}
|
||||
|
||||
The platform or protocol from which the bridge transaction or event originates.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs bridge_sender %}
|
||||
|
||||
The address that initiated the bridge deposit or transfer. This address is the sender of the tokens/assets being bridged to the destination chain.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs bridge_receiver %}
|
||||
|
||||
The designated address set to receive the bridged tokens on the target chain after the completion of the bridge transaction. For non-evm chains, the hex address is decoded/encoded to match the data format of the destination chain, where possible.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs destination_chain %}
|
||||
|
||||
The name of the blockchain network to which the assets are being bridged.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs destination_chain_id %}
|
||||
|
||||
The numeric identifier associated with the destination blockchain network. This is specific to the chain and helps in uniquely identifying it.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs bridge_address %}
|
||||
|
||||
The address of the contract responsible for handling the bridge deposit or transfer. This contract mediates the transfer and ensures that assets are sent and received appropriately.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
|
||||
{% docs bridge_token_address %}
|
||||
|
||||
The address associated with the token that is being bridged. It provides a unique identifier for the token within its origin blockchain.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs source_chain_id %}
|
||||
|
||||
The numeric identifier associated with the source blockchain network. This is specific to the chain and helps in uniquely identifying it.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs source_chain %}
|
||||
|
||||
The name of the blockchain network from which the assets are being bridged.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/epoch.md
Normal file
5
models/descriptions/epoch.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs epoch %}
|
||||
|
||||
An epoch in the Aptos blockchain is defined as a duration of time, in seconds, during which a number of blocks are voted on by the validators, the validator set is updated, and the rewards are distributed to the validators. The Aptos mainnet epoch is set as 7200 seconds (two hours).
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/event_address.md
Normal file
5
models/descriptions/event_address.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs event_address %}
|
||||
|
||||
The first segment of the event type
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/event_data.md
Normal file
5
models/descriptions/event_data.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs event_data %}
|
||||
|
||||
The "data" object within this event.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/event_index.md
Normal file
5
models/descriptions/event_index.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs event_index %}
|
||||
|
||||
Unique identifier for the event. This is a monotonically increasing integer that is incremented for each event. This is useful for determining the order of changes.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/event_module.md
Normal file
5
models/descriptions/event_module.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs event_module %}
|
||||
|
||||
The second segment of the event type
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/event_resource.md
Normal file
5
models/descriptions/event_resource.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs event_resource %}
|
||||
|
||||
The third segment of the event type
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/event_root_hash.md
Normal file
5
models/descriptions/event_root_hash.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs event_root_hash %}
|
||||
|
||||
The root hash for the event.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/event_type.md
Normal file
5
models/descriptions/event_type.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs event_type %}
|
||||
|
||||
The full three-part descriptive type from event. The event type consists of the event_address :: event_module :: event_resource.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/events.md
Normal file
5
models/descriptions/events.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs events %}
|
||||
|
||||
The events that the transaction executed.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/expiration_timestamp_secs.md
Normal file
5
models/descriptions/expiration_timestamp_secs.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs expiration_timestamp_secs %}
|
||||
|
||||
The time at which the transaction ceases to valid.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/first_version.md
Normal file
5
models/descriptions/first_version.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs first_version %}
|
||||
|
||||
The version number of the first transaction in the block.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/from_address_transfer.md
Normal file
5
models/descriptions/from_address_transfer.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs from_address_transfer %}
|
||||
|
||||
The account address that sent the transfer.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/gas_unit_price.md
Normal file
5
models/descriptions/gas_unit_price.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs gas_unit_price %}
|
||||
|
||||
The cost per unit of gas, determining the transaction fee paid by the sender for each unit of computational resource consumed
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/gas_used.md
Normal file
5
models/descriptions/gas_used.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs gas_used %}
|
||||
|
||||
The amount of gas used for the transaction
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/handle_change.md
Normal file
5
models/descriptions/handle_change.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs handle_change %}
|
||||
|
||||
The top level handle for this change.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/inner_change_type.md
Normal file
5
models/descriptions/inner_change_type.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs inner_change_type %}
|
||||
|
||||
The full three-part descriptive change type from change. The inner change type consists of the change_address :: change_module :: change_resource.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/inserted_timestamp.md
Normal file
5
models/descriptions/inserted_timestamp.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs inserted_timestamp %}
|
||||
|
||||
The utc timestamp at which the row was inserted into the table.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/key_change.md
Normal file
5
models/descriptions/key_change.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs key_change %}
|
||||
|
||||
The key value for the write_table_item change
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/label.md
Normal file
5
models/descriptions/label.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs label %}
|
||||
|
||||
The label or name of the address.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/label_subtype.md
Normal file
5
models/descriptions/label_subtype.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs label_subtype %}
|
||||
|
||||
Adds more detail to the label type.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/label_type.md
Normal file
5
models/descriptions/label_type.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs label_type %}
|
||||
|
||||
A broad category that describes what a label is representing.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/last_version.md
Normal file
5
models/descriptions/last_version.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs last_version %}
|
||||
|
||||
The version number of the last transaction in the block.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/max_gas_amount.md
Normal file
5
models/descriptions/max_gas_amount.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs max_gas_amount %}
|
||||
|
||||
The maximum amount of gas allocated for the execution of a transaction
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/modified_timestamp.md
Normal file
5
models/descriptions/modified_timestamp.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs modified_timestamp %}
|
||||
|
||||
The utc timestamp at which the row was last modified.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/payload.md
Normal file
5
models/descriptions/payload.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs payload %}
|
||||
|
||||
The data that is being carried by a transaction.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/payload_function.md
Normal file
5
models/descriptions/payload_function.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs payload_function %}
|
||||
|
||||
The function that is being called in the transaction payload.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/pk.md
Normal file
5
models/descriptions/pk.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs pk %}
|
||||
|
||||
The unique identifier for each row in the table.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/proposer.md
Normal file
5
models/descriptions/proposer.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs proposer %}
|
||||
|
||||
The block proposer.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/round.md
Normal file
5
models/descriptions/round.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs round %}
|
||||
|
||||
A round number is a shared counter used to select leaders during an epoch of the consensus protocol.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/sender.md
Normal file
5
models/descriptions/sender.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs sender %}
|
||||
|
||||
Sender is the address of the originator account for a transaction. A transaction must be signed by the originator.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/sequence_number.md
Normal file
5
models/descriptions/sequence_number.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs sequence_number %}
|
||||
|
||||
The sequence number for an account indicates the number of transactions that have been submitted and committed on chain from that account. It is incremented every time a transaction sent from that account is executed or aborted and stored in the blockchain.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/signature.md
Normal file
5
models/descriptions/signature.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs signature %}
|
||||
|
||||
A signature is the result of hashing the signing message with the client's private key. By default Aptos uses the Ed25519 scheme to generate the signature of the raw transaction.
|
||||
|
||||
{% enddocs %}
|
||||
71
models/descriptions/stats/stats_core.md
Normal file
71
models/descriptions/stats/stats_core.md
Normal file
@ -0,0 +1,71 @@
|
||||
{% docs ez_core_metrics_hourly_table_doc %}
|
||||
|
||||
A convenience table that aggregates block and transaction related metrics using various aggregate functions such as SUM, COUNT, MIN and MAX from the fact_transactions table, on an hourly basis. Stats for the current hour will be updated as new data arrives.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs block_timestamp_hour %}
|
||||
|
||||
The hour of the timestamp of the block.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs block_number_min %}
|
||||
|
||||
The minimum block number in the hour.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs block_number_max %}
|
||||
|
||||
The maximum block number in the hour.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs block_count %}
|
||||
|
||||
The number of blocks in the hour.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs transaction_count %}
|
||||
|
||||
The number of transactions in the hour.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs transaction_count_success %}
|
||||
|
||||
The number of successful transactions in the hour.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs transaction_count_failed %}
|
||||
|
||||
The number of failed transactions in the hour.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs unique_sender_count %}
|
||||
|
||||
The number of unique sender address in the hour.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs unique_payload_function_count %}
|
||||
|
||||
The number of unique payload functions in the hour.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs total_fees_native %}
|
||||
|
||||
The sum of all fees in the hour, in the native fee currency.
|
||||
|
||||
{% enddocs %}
|
||||
|
||||
{% docs total_fees_usd %}
|
||||
|
||||
The sum of all fees in the hour, in USD.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/success.md
Normal file
5
models/descriptions/success.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs success %}
|
||||
|
||||
The boolean value indicating whether the transaction was successful or not.
|
||||
|
||||
{% enddocs %}
|
||||
8
models/descriptions/tables/core__fact_blocks.md
Normal file
8
models/descriptions/tables/core__fact_blocks.md
Normal file
@ -0,0 +1,8 @@
|
||||
{% docs core__fact_blocks %}
|
||||
|
||||
This table contains "block" level data for the m1 blockchain. This table can be used to analyze trends at a block level, for example total transactions over time.
|
||||
"The m1 blockchain doesn't have an explicit notion of a block — it only uses blocks for batching and executing transactions.
|
||||
A transaction at height 0 is the first transaction (genesis transaction), and a transaction at height 100 is the 101st transaction in the transaction store."
|
||||
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/tables/core__fact_changes.md
Normal file
5
models/descriptions/tables/core__fact_changes.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs core__fact_changes %}
|
||||
|
||||
This table contains the flattened changes from the transaction. Each change will have a unique change index within a transaction.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/tables/core__fact_events.md
Normal file
5
models/descriptions/tables/core__fact_events.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs core__fact_events %}
|
||||
|
||||
This table contains the flattened events from the transaction. Each event will have a unique event index within a transaction.
|
||||
|
||||
{% enddocs %}
|
||||
6
models/descriptions/tables/core__fact_transactions.md
Normal file
6
models/descriptions/tables/core__fact_transactions.md
Normal file
@ -0,0 +1,6 @@
|
||||
{% docs core__fact_transactions %}
|
||||
|
||||
This table contains transaction level data for the Aptos blockchain. Each transaction will have a unique transaction hash and version.
|
||||
For more information see [docs.movementlabs.xyz docs] Each transaction will have a unique transaction hash
|
||||
|
||||
{% enddocs %}
|
||||
@ -0,0 +1,5 @@
|
||||
{% docs core__fact_transactions_block_metadata %}
|
||||
|
||||
These transactions are inserted at the beginning of the block. A BlockMetadata transaction can also mark the end of an epoch and trigger reward distribution to validators.
|
||||
|
||||
{% enddocs %}
|
||||
@ -0,0 +1,5 @@
|
||||
{% docs core__fact_transactions_state_checkpoint %}
|
||||
|
||||
These transactions are appended at the end of the block and is used as a checkpoint milestone.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/to_address_transfer.md
Normal file
5
models/descriptions/to_address_transfer.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs to_address_transfer %}
|
||||
|
||||
The account address that received the transfer.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/token_address.md
Normal file
5
models/descriptions/token_address.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs token_address %}
|
||||
|
||||
The full address of the token. This string contains the account,module, and resource.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/transfer_event.md
Normal file
5
models/descriptions/transfer_event.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs transfer_event %}
|
||||
|
||||
The type of transfer event. Value will either be 'WithdrawEvent' or 'DepositEvent'
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/tx_count.md
Normal file
5
models/descriptions/tx_count.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs tx_count %}
|
||||
|
||||
The count of transactions in this block.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/tx_hash.md
Normal file
5
models/descriptions/tx_hash.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs tx_hash %}
|
||||
|
||||
Transaction hash is a unique 66-character identifier that is generated when a transaction is executed.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/tx_type.md
Normal file
5
models/descriptions/tx_type.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs tx_type %}
|
||||
|
||||
The type of the transaction. Values will be one of "block_metadata_transaction","state_checkpoint_transaction","user_transaction".
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/value_change.md
Normal file
5
models/descriptions/value_change.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs value_change %}
|
||||
|
||||
The value for the write_table_item change
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/version.md
Normal file
5
models/descriptions/version.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs version %}
|
||||
|
||||
The version also know as the height of the transactions that have been executed on the Aptos blockchain. The first transaction in the blockchain has a version of 0. The version number is incremented by 1 for each transaction that is executed on the blockchain.
|
||||
|
||||
{% enddocs %}
|
||||
5
models/descriptions/vm_status.md
Normal file
5
models/descriptions/vm_status.md
Normal file
@ -0,0 +1,5 @@
|
||||
{% docs vm_status %}
|
||||
|
||||
For failed transactions, this fields provides context to why the transaction failed. For successful transactions, this field will be set to `Executed successfully`.
|
||||
|
||||
{% enddocs %}
|
||||
53
models/evm/bronze/api_udf/bronze_evm_api__contract_abis.sql
Normal file
53
models/evm/bronze/api_udf/bronze_evm_api__contract_abis.sql
Normal file
@ -0,0 +1,53 @@
|
||||
{{ config(
|
||||
materialized = 'incremental',
|
||||
unique_key = "contract_address",
|
||||
full_refresh = false,
|
||||
tags = ['noncore']
|
||||
) }}
|
||||
|
||||
WITH base AS (
|
||||
|
||||
SELECT
|
||||
contract_address
|
||||
FROM
|
||||
{{ ref('silver_evm__relevant_contracts') }}
|
||||
WHERE
|
||||
total_interaction_count >= 100
|
||||
|
||||
{% if is_incremental() %}
|
||||
EXCEPT
|
||||
SELECT
|
||||
contract_address
|
||||
FROM
|
||||
{{ this }}
|
||||
{% endif %}
|
||||
LIMIT
|
||||
50
|
||||
), row_nos AS (
|
||||
SELECT
|
||||
contract_address,
|
||||
ROW_NUMBER() over (
|
||||
ORDER BY
|
||||
contract_address
|
||||
) AS row_no
|
||||
FROM
|
||||
base
|
||||
),
|
||||
batched AS ({% for item in range(51) %}
|
||||
SELECT
|
||||
rn.contract_address, CONCAT('https://seitrace.com/pacific-1/api/v2/smart-contracts/', contract_address) AS url, IFNULL(live.udf_api(url) :data :abi, ARRAY_CONSTRUCT('ABI unavailable')) AS abi_data, SYSDATE() AS _inserted_timestamp
|
||||
FROM
|
||||
row_nos rn
|
||||
WHERE
|
||||
row_no = {{ item }}
|
||||
|
||||
{% if not loop.last %}
|
||||
UNION ALL
|
||||
{% endif %}
|
||||
{% endfor %})
|
||||
SELECT
|
||||
contract_address,
|
||||
abi_data,
|
||||
_inserted_timestamp
|
||||
FROM
|
||||
batched
|
||||
22
models/evm/bronze/api_udf/bronze_evm_api__contract_abis.yml
Normal file
22
models/evm/bronze/api_udf/bronze_evm_api__contract_abis.yml
Normal file
@ -0,0 +1,22 @@
|
||||
version: 2
|
||||
models:
|
||||
- name: bronze_evm_api__contract_abis
|
||||
|
||||
columns:
|
||||
- name: _INSERTED_TIMESTAMP
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_row_values_to_have_recent_data:
|
||||
datepart: day
|
||||
interval: 1
|
||||
- dbt_expectations.expect_column_values_to_be_in_type_list:
|
||||
column_type_list:
|
||||
- TIMESTAMP_NTZ
|
||||
- name: CONTRACT_ADDRESS
|
||||
tests:
|
||||
- not_null
|
||||
- dbt_expectations.expect_column_values_to_be_in_type_list:
|
||||
column_type_list:
|
||||
- VARCHAR
|
||||
- dbt_expectations.expect_column_values_to_match_regex:
|
||||
regex: "^(0x)[0-9a-fA-F]{40}$"
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user