Merge pull request #6 from FlipsideCrypto/evm

Evm
This commit is contained in:
Jessica Huhnke 2023-05-23 15:45:53 -05:00 committed by GitHub
commit 43cc0bb24b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
109 changed files with 3334 additions and 55 deletions

View File

@ -3,9 +3,9 @@ run-name: dbt_run_dev_refresh
on:
workflow_dispatch:
# schedule:
# # Runs "at 9:00 UTC" (see https://crontab.guru)
# - cron: '0 9 * * *'
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: '0 9 * * *'
env:
USE_VARS: "${{ vars.USE_VARS }}"

View File

@ -0,0 +1,32 @@
# Scheduled production dbt run: executes all models four times a day.
name: dbt_run_scheduled
run-name: dbt_run_scheduled

on:
  workflow_dispatch:
  schedule:
    # 01:00, 07:00, 13:00, 19:00 UTC (see https://crontab.guru)
    - cron: '0 1,7,13,19 * * *'

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

jobs:
  called_workflow_template:
    # Shared Flipside workflow template that runs the dbt command below.
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
    with:
      dbt_command: >
        dbt run -s ./models
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit

View File

@ -0,0 +1,44 @@
# Streamline history backfill: runs realtime request models with
# STREAMLINE_RUN_HISTORY=True so missing historical ranges are requested.
# NOTE(review): this workflow selects models/streamline/realtime even
# though it is the "history" job -- the history behavior comes from the
# STREAMLINE_RUN_HISTORY var; confirm this is intentional.
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history

on:
  workflow_dispatch:
  schedule:
    # Runs "every 6 hours" (see https://crontab.guru)
    - cron: '0 1-23/6 * * *'

env:
  DBT_PROFILES_DIR: ./
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

# Only one instance of this workflow at a time.
concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v1
        with:
          python-version: "3.7.x"
      - name: install dependencies
        run: |
          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True, "STREAMLINE_RUN_HISTORY":True}' -m 1+models/streamline/realtime

View File

@ -0,0 +1,44 @@
# Streamline realtime ingestion: invokes the external-function request
# models (plus their direct parents via `1+`) once an hour.
name: dbt_run_streamline_realtime
run-name: dbt_run_streamline_realtime

on:
  workflow_dispatch:
  schedule:
    # Runs "every 1 hour at min 40" (see https://crontab.guru)
    - cron: '40 */1 * * *'

env:
  DBT_PROFILES_DIR: ./
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

# Only one instance of this workflow at a time.
concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v1
        with:
          python-version: "3.7.x"
      - name: install dependencies
        run: |
          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/realtime

44
.github/workflows/dbt_test.yml vendored Normal file
View File

@ -0,0 +1,44 @@
# Nightly dbt test suite over all models.
name: dbt_test_scheduled
run-name: dbt_test_scheduled

on:
  workflow_dispatch:
  schedule:
    # 04:00 UTC daily (see https://crontab.guru)
    - cron: '0 4 * * *'

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

# Only one instance of this workflow at a time.
concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v1
        with:
          python-version: "3.7.x"
      - name: install dependencies
        run: |
          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt test -m ./models

View File

@ -1,13 +1,10 @@
# Please find and replace all instances of `xyz` with your project name.
## Profile Set Up
#### Use the following within profiles.yml
----
```yml
xyz:
evmos:
target: dev
outputs:
dev:
@ -17,7 +14,7 @@ xyz:
user: <USERNAME>
password: <PASSWORD>
region: <REGION>
database: xyz_DEV
database: evmos_DEV
warehouse: <WAREHOUSE>
schema: silver
threads: 4
@ -73,5 +70,5 @@ dbt run --var '{"UPDATE_SNOWFLAKE_TAGS":True}' -s models/core/core__fact_swaps.s
```
select *
from table(xyz.information_schema.tag_references('xyz.core.fact_blocks', 'table'));
```
from table(evmos.information_schema.tag_references('evmos.core.fact_blocks', 'table'));
```

View File

@ -0,0 +1,47 @@
-- Ad-hoc exploration queries against the Evmos Tendermint RPC endpoint.
-- NOTE(review): relies on UDFs that live in the ETHEREUM database
-- (ETHEREUM.STREAMLINE.*) -- confirm cross-database access is intended.
-- Fix: added statement terminators so the file runs as a script.

-- get chainhead
select ETHEREUM.STREAMLINE.UDF_API('GET','https://rpc-evmos.imperator.co/abci_info',{},{} );

-- sample gen for recent blocks
-- NOTE(review): comment said "last 1k blocks" but TOP 10000 is used.
create table sample_block_ids as (
    with gen as (
        select
            row_number() over (
                order by
                    seq4()
            ) as block_height
        from
            table(generator(rowcount => 100000000))
    )
    select top 10000 block_height from gen
    -- except select 12872988 from sample_blocks
    where block_height <= 12873408
    order by 1 desc
);

-- pull one block
select
    block_height,
    ETHEREUM.STREAMLINE.UDF_JSON_RPC_CALL('https://rpc-evmos.imperator.co/',{},
        [
            { 'id': block_height, 'jsonrpc': '2.0', 'method': 'block', 'params': [ block_height::STRING ] }
        ]
    ) data,
    getdate() as _inserted_timestamp
from
    (select 12889280 as block_height );

-- pull one block's transactions
select
    block_height,
    ETHEREUM.STREAMLINE.UDF_JSON_RPC_CALL('https://rpc-evmos.imperator.co/',{},
        [
            { 'id': block_height, 'jsonrpc': '2.0', 'method': 'tx_search', 'params': [ 'tx.height='||block_height::STRING , true, '1', '1000', 'asc' ] }
        ]
    ) data,
    getdate() as _inserted_timestamp
from
    (select 12889280 as block_height );

View File

@ -1,14 +1,14 @@
# Name your project! Project names should contain only lowercase characters
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "xyz_models"
name: "evmos_models"
version: "1.0.0"
config-version: 2
require-dbt-version: ">=1.4.0"
# This setting configures which "profile" dbt uses for this project.
profile: "xyz"
profile: "evmos"
# These configurations specify where dbt should look for different types of files.
# The `model-paths` config, for example, states that models in this project can be
@ -41,7 +41,12 @@ models:
vars:
"dbt_date:time_zone": GMT
"UPDATE_SNOWFLAKE_TAGS": TRUE
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
UPDATE_UDFS_AND_SPS: False
STREAMLINE_RUN_HISTORY: False
UPDATE_SNOWFLAKE_TAGS: True
WAIT: 0
tests:
+store_failures: true # all tests

View File

@ -1,5 +1,5 @@
{% macro create_sps() %}
{% if target.database == 'xyz' %}
{% if target.database == 'EVMOS' %}
CREATE SCHEMA IF NOT EXISTS _internal;
{{ sp_create_prod_clone('_internal') }};
{% endif %}

View File

@ -1,2 +1,9 @@
{# Deploys every Streamline UDF/UDTF this project uses:
   the base-table generator plus the four external functions. #}
{% macro create_udfs() %}
{{ create_udtf_get_base_table(
schema = "streamline"
) }}
{{ create_udf_get_chainhead() }}
{{ create_udf_json_rpc() }}
{{ create_udf_get_tendermint_transactions() }}
{{ create_udf_get_tendermint_validators() }}
{% endmacro %}

View File

@ -1,6 +1,6 @@
{% macro run_sp_create_prod_clone() %}
{% set clone_query %}
call xyz._internal.create_prod_clone('xyz', 'xyz_dev', 'internal_dev');
call evmos._internal.create_prod_clone('evmos', 'evmos_dev', 'internal_dev');
{% endset %}
{% do run_query(clone_query) %}

View File

@ -0,0 +1,11 @@
{# Creates the Snowflake API integration (aws_evmos_api) that the
   Streamline external functions route through. Guarded to the prod
   target; both dev and prod gateway prefixes are allow-listed.
   NOTE(review): macro is named create_aws_ethereum_api but provisions
   the *evmos* integration -- presumably copied from an EVM project;
   confirm the macro name is intentional. #}
{% macro create_aws_ethereum_api() %}
{% if target.name == "prod" %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_evmos_api api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::490041342817:role/snowflake-api-evmos' api_allowed_prefixes = (
'https://55h4rahr50.execute-api.us-east-1.amazonaws.com/dev/',
'https://n0reh6ugbf.execute-api.us-east-1.amazonaws.com/prod/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}

View File

@ -0,0 +1,24 @@
{# Creates {{ schema }}.udtf_get_base_table(max_height): a table
   function returning one row per block height from 1 up to and
   including max_height (capped at 100M by the generator). #}
{% macro create_udtf_get_base_table(schema) %}
create or replace function {{ schema }}.udtf_get_base_table(max_height integer)
returns table (height number)
as
$$
with height_sequence as (
select
row_number() over (
order by
seq4()
) as block_height
from
table(generator(rowcount => 100000000))
)
select
block_height as height
from
height_sequence
where
block_height <= max_height
$$
;
{% endmacro %}

View File

@ -0,0 +1,171 @@
{# Backfill driver for log decoding: emits logs in the [start, stop]
   block range that have an ABI match and have not yet been decoded.
   NOTE(review): the refs here (_max_block_by_date, silver__abis,
   streamline__decode_logs) are EVM-style models -- this macro looks
   copied from an EVM project; confirm those models exist in this one. #}
{% macro decode_logs_history(
start,
stop
) %}
-- Highest block with a recorded date; caps how far the backfill reaches.
WITH look_back AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 1
)
SELECT
l.block_number,
l._log_id,
abi.data AS abi,
l.data
FROM
{{ ref("streamline__decode_logs") }}
l
INNER JOIN {{ ref("silver__abis") }}
abi
ON l.abi_address = abi.contract_address
WHERE
(
l.block_number BETWEEN {{ start }}
AND {{ stop }}
)
AND l.block_number <= (
SELECT
block_number
FROM
look_back
)
-- Exclude logs already present in the completed-decode table.
AND _log_id NOT IN (
SELECT
_log_id
FROM
{{ ref("streamline__complete_decode_logs") }}
WHERE
(
block_number BETWEEN {{ start }}
AND {{ stop }}
)
AND block_number <= (
SELECT
block_number
FROM
look_back
)
)
{% endmacro %}
{# Reads rows from a bronze external table, restricted to files
   registered within the last 7 days, filtering out rows whose payload
   carries a known JSON-RPC error code. partition_function extracts the
   block-id partition from the file path so the scan is pruned. #}
{% macro streamline_external_table_query(
model,
partition_function,
partition_name,
unique_key
) %}
-- File-registration metadata (7-day lookback) used to prune partitions.
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS {{ partition_name }}
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -7, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", model) }}')
) A
)
SELECT
{{ unique_key }},
DATA,
_inserted_timestamp,
-- Deterministic surrogate id derived from the unique key.
MD5(
CAST(
COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text
)
) AS id,
s.{{ partition_name }},
s.value AS VALUE
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.{{ partition_name }} = s.{{ partition_name }}
WHERE
b.{{ partition_name }} = s.{{ partition_name }}
-- Drop responses that came back as JSON-RPC server errors.
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010'
)
)
{% endmacro %}
{# Full-refresh variant of streamline_external_table_query: scans every
   file ever registered on the external table (external_table_files)
   instead of the 7-day registration history. Same error-code filter. #}
{% macro streamline_external_table_FR_query(
model,
partition_function,
partition_name,
unique_key
) %}
-- All registered files for the external table, with partition extracted
-- from the file path by partition_function.
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS {{ partition_name }}
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", model) }}'
)
) A
)
SELECT
{{ unique_key }},
DATA,
_inserted_timestamp,
-- Deterministic surrogate id derived from the unique key.
MD5(
CAST(
COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text
)
) AS id,
s.{{ partition_name }},
s.value AS VALUE
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.{{ partition_name }} = s.{{ partition_name }}
WHERE
b.{{ partition_name }} = s.{{ partition_name }}
-- Drop responses that came back as JSON-RPC server errors.
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010'
)
)
{% endmacro %}

View File

@ -0,0 +1,37 @@
{# Returns the current chainhead via the API gateway (prod vs dev URL
   chosen by target). #}
{% macro create_udf_get_chainhead() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_get_chainhead() returns variant api_integration = aws_evmos_api AS {% if target.name == "prod" %}
'https://n0reh6ugbf.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
{% else %}
'https://55h4rahr50.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
{%- endif %};
{% endmacro %}
{# Bulk JSON-RPC dispatcher: takes an OBJECT of requests, returns ARRAY
   of responses. #}
{% macro create_udf_json_rpc() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.udf_json_rpc(
json OBJECT
) returns ARRAY api_integration = aws_evmos_api AS {% if target.name == "prod" %}
'https://n0reh6ugbf.execute-api.us-east-1.amazonaws.com/prod/bulk_get_json_rpc'
{% else %}
'https://55h4rahr50.execute-api.us-east-1.amazonaws.com/dev/bulk_get_json_rpc'
{%- endif %};
{% endmacro %}
{# Bulk fetch of Tendermint transactions for requested heights. #}
{% macro create_udf_get_tendermint_transactions() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.bulk_get_tendermint_transactions(
json OBJECT
) returns ARRAY api_integration = aws_evmos_api AS {% if target.name == "prod" %}
'https://n0reh6ugbf.execute-api.us-east-1.amazonaws.com/prod/bulk_get_tendermint_transactions'
{% else %}
'https://55h4rahr50.execute-api.us-east-1.amazonaws.com/dev/bulk_get_tendermint_transactions'
{%- endif %};
{% endmacro %}
{# Bulk fetch of Tendermint validator sets for requested heights. #}
{% macro create_udf_get_tendermint_validators() %}
CREATE EXTERNAL FUNCTION IF NOT EXISTS streamline.bulk_get_tendermint_validators(
json OBJECT
) returns ARRAY api_integration = aws_evmos_api AS {% if target.name == "prod" %}
'https://n0reh6ugbf.execute-api.us-east-1.amazonaws.com/prod/bulk_get_tendermint_validators'
{% else %}
'https://55h4rahr50.execute-api.us-east-1.amazonaws.com/dev/bulk_get_tendermint_validators'
{%- endif %};
{% endmacro %}

View File

@ -1,7 +1,7 @@
{% macro add_database_or_schema_tags() %}
{{ set_database_tag_value(
'BLOCKCHAIN_NAME',
'xyz'
'EVMOS'
) }}
{{ set_database_tag_value(
'BLOCKCHAIN_TYPE',

View File

@ -1,37 +1,34 @@
{% macro sequence_gaps(
table,
partition_by,
column
) %}
{%- set partition_sql = partition_by | join(", ") -%}
{%- set previous_column = "prev_" ~ column -%}
WITH source AS (
SELECT
{{ partition_sql + "," if partition_sql }}
{{ column }},
LAG(
{{ column }},
1
) over (
{{ "PARTITION BY " ~ partition_sql if partition_sql }}
ORDER BY
{{ column }} ASC
) AS {{ previous_column }}
FROM
{{ table }}
WHERE
block_timestamp::date <= current_date - 1
)
{% test sequence_gaps(
model,
partition_by,
column_name
) %}
{%- set partition_sql = partition_by | join(", ") -%}
{%- set previous_column = "prev_" ~ column_name -%}
WITH source AS (
SELECT
{{ partition_sql + "," if partition_sql }}
{{ column_name }},
LAG(
{{ column_name }},
1
) over (
{{ "PARTITION BY " ~ partition_sql if partition_sql }}
ORDER BY
{{ column_name }} ASC
) AS {{ previous_column }}
FROM
{{ model }}
)
SELECT
{{ partition_sql + "," if partition_sql }}
{{ previous_column }},
{{ column }},
{{ column }} - {{ previous_column }}
{{ column_name }},
{{ column_name }} - {{ previous_column }}
- 1 AS gap
FROM
source
WHERE
{{ column }} - {{ previous_column }} <> 1
{{ column_name }} - {{ previous_column }} <> 1
ORDER BY
gap DESC
{% endmacro %}
gap DESC {% endtest %}

78
macros/utils.sql Normal file
View File

@ -0,0 +1,78 @@
{# Emits SQL that invokes the external function `func` only when
   `target` contains at least one row; when STREAMLINE_INVOKE_STREAMS
   is false it emits a no-op SELECT NULL instead. Used so request
   models only hit the API gateway when there is work queued. #}
{% macro if_data_call_function(
func,
target
) %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: Calling udf " ~ func ~ " on " ~ target,
True
) }}
{% endif %}
SELECT
{{ func }}
WHERE
EXISTS(
SELECT
1
FROM
{{ target }}
LIMIT
1
)
{% else %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: NOOP",
False
) }}
{% endif %}
SELECT
NULL
{% endif %}
{% endmacro %}
{# Post-hook style wait: if STREAMLINE_INVOKE_STREAMS is set and the
   current model's table has rows, block via system$wait for the WAIT
   var (default 600) to give Streamline time to land requested files.
   NOTE(review): system$wait's unit argument is not passed -- confirm
   the intended unit of WAIT. #}
{% macro if_data_call_wait() %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% set query %}
SELECT
1
WHERE
EXISTS(
SELECT
1
FROM
{{ model.schema ~ "." ~ model.alias }}
LIMIT
1
) {% endset %}
{% if execute %}
{% set results = run_query(
query
) %}
{% if results %}
{{ log(
"Waiting...",
info = True
) }}
{% set wait_query %}
SELECT
system$wait(
{{ var(
"WAIT",
600
) }}
) {% endset %}
{% do run_query(wait_query) %}
{% else %}
SELECT
NULL;
{% endif %}
{% endif %}
{% endif %}
{% endmacro %}

View File

@ -0,0 +1,60 @@
{# Incremental bronze load of raw Tendermint block responses from the
   streamline external stage; keeps the newest landed copy per block.
   NOTE(review): merge_update_columns = ["block_id"] means matched rows
   only rewrite block_id, so re-landed data for an existing block is
   effectively ignored on merge -- confirm that is intentional. #}
{{ config(
materialized = 'incremental',
unique_key = 'block_id',
cluster_by = ['_inserted_timestamp::date'],
merge_update_columns = ["block_id"],
) }}
-- Registered files for the external table; earliest of the two
-- timestamps is treated as the landing time.
WITH meta AS (
SELECT
registered_on,
last_modified,
LEAST(
last_modified,
registered_on
) AS _inserted_timestamp,
file_name
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "streamline", "tendermint_blocks") }}'
)
) A
{% if is_incremental() %}
WHERE
LEAST(
registered_on,
last_modified
) >= (
SELECT
COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
FROM
{{ this }})
)
{% else %}
)
{% endif %}
SELECT
value,
_partition_by_block_id,
block_number AS block_id,
DATA,
TO_TIMESTAMP(
m._inserted_timestamp
) AS _inserted_timestamp
FROM
{{ source(
'streamline',
'tendermint_blocks'
) }}
JOIN meta m
ON m.file_name = metadata$filename
WHERE
DATA: error IS NULL
-- Latest landed copy wins per block.
qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,60 @@
{# Incremental bronze load of raw Tendermint transaction responses;
   unique per tx hash, keeping the newest landed copy. #}
{{ config(
materialized = 'incremental',
unique_key = 'tx_id',
cluster_by = ['_inserted_timestamp::date'],
merge_update_columns = ["data", "_inserted_timestamp"],
) }}
-- Registered files for the external table; earliest of the two
-- timestamps is treated as the landing time.
WITH meta AS (
SELECT
registered_on,
last_modified,
LEAST(
last_modified,
registered_on
) AS _inserted_timestamp,
file_name
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "streamline", "tendermint_transactions") }}'
)
) A
{% if is_incremental() %}
WHERE
LEAST(
registered_on,
last_modified
) >= (
SELECT
COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
FROM
{{ this }})
)
{% else %}
)
{% endif %}
SELECT
value,
_partition_by_block_id,
block_number as block_id,
-- Tendermint tx hash is the natural key.
value :data :hash :: STRING AS tx_id,
metadata,
DATA,
TO_TIMESTAMP(
m._inserted_timestamp
) AS _inserted_timestamp
FROM
{{ source(
'streamline',
'tendermint_transactions'
) }}
JOIN meta m
ON m.file_name = metadata$filename
WHERE
-- Latest landed copy wins per tx hash.
DATA: error IS NULL qualify(ROW_NUMBER() over (PARTITION BY value :data :hash :: STRING
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,10 @@
{# Full-refresh bronze view over the eth_blocks external table: scans
   every registered file (see streamline_external_table_FR_query). #}
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_FR_query(
"eth_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,10 @@
{# Full-refresh bronze view over the eth_transactions external table.
   NOTE(review): unique_key is block_number, not a tx-level key --
   confirm this matches the payload granularity. #}
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_FR_query(
"eth_transactions",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,10 @@
{# Full-refresh bronze view over the tendermint_blocks external table. #}
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_FR_query(
"tendermint_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,10 @@
{# Full-refresh bronze view over the tendermint_transactions external
   table. NOTE(review): unique_key is block_number, not a tx-level key
   -- confirm this matches the payload granularity. #}
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_FR_query(
"tendermint_transactions",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,10 @@
{# Full-refresh bronze view over the tendermint_validators external
   table. #}
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_FR_query(
"tendermint_validators",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,12 @@
{# Bronze view over the eth_blocks external table, limited to files
   registered in the last 7 days (streamline_external_table_query). #}
{{ config (
materialized = 'view'
) }}
{# Fix: removed dead `{% set model = ... %}` -- its value was never
   used (the source name is passed literally below) and it shadowed
   dbt's built-in `model` context variable. #}
{{ streamline_external_table_query(
"eth_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,12 @@
{# Bronze view over the eth_transactions external table, limited to
   files registered in the last 7 days. #}
{{ config (
materialized = 'view'
) }}
{# Fix: removed dead `{% set model = ... %}` -- its value was never
   used and it shadowed dbt's built-in `model` context variable. #}
{{ streamline_external_table_query(
"eth_transactions",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,12 @@
{# Bronze view over the tendermint_blocks external table, limited to
   files registered in the last 7 days. #}
{{ config (
materialized = 'view'
) }}
{# Fix: removed dead `{% set model = ... %}` -- its value was never
   used and it shadowed dbt's built-in `model` context variable. #}
{{ streamline_external_table_query(
"tendermint_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,12 @@
{# Bronze view over the tendermint_transactions external table, limited
   to files registered in the last 7 days. #}
{{ config (
materialized = 'view'
) }}
{# Fix: removed dead `{% set model = ... %}` -- its value was never
   used and it shadowed dbt's built-in `model` context variable. #}
{{ streamline_external_table_query(
"tendermint_transactions",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,12 @@
{# Bronze view over the tendermint_validators external table, limited
   to files registered in the last 7 days. #}
{{ config (
materialized = 'view'
) }}
{# Fix: removed dead `{% set model = ... %}` -- its value was never
   used and it shadowed dbt's built-in `model` context variable. #}
{{ streamline_external_table_query(
"tendermint_validators",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,46 @@
{# Snapshot of validator metadata from the Keplr LCD endpoint.
   Fallback pattern: the fresh API pull is ranked 2, the previous table
   contents rank 1; the final join keeps rows with MAX(rank), so a
   successful pull replaces the table and a failed/empty pull keeps the
   prior snapshot.
   NOTE(review): `bronze_api.get_validator_metadata_lcd` is a hardcoded
   self-reference to this model's previous build -- first run in a new
   environment will fail until the table exists; also relies on the
   cross-database ethereum.streamline.udf_api. Confirm both. #}
{{ config(
materialized = 'table'
) }}
WITH call AS (
SELECT
ethereum.streamline.udf_api(
'GET',
'https://lcd-evmos.keplr.app/cosmos/staking/v1beta1/validators?pagination.limit=5000',{},{}
) AS resp,
SYSDATE() AS _inserted_timestamp
),
keep_last_if_failed AS (
-- Fresh pull: one row per validator from the LCD response.
SELECT
i.value :operator_address :: STRING AS address,
i.value AS DATA,
_inserted_timestamp,
2 AS RANK
FROM
call,
LATERAL FLATTEN(
input => resp :data :validators
) i
UNION ALL
-- Previous snapshot (fallback when the pull returns nothing).
SELECT
address,
DATA,
_inserted_timestamp,
1 AS RANK
FROM
bronze_api.get_validator_metadata_lcd
)
SELECT
address,
DATA,
_inserted_timestamp
FROM
keep_last_if_failed A
JOIN (
SELECT
MAX(RANK) max_rank
FROM
keep_last_if_failed
) b
ON A.rank = b.max_rank

View File

@ -0,0 +1,49 @@
{# Curated address labels for Evmos, combining crosschain hand-curated
   labels, token metadata, and validator labels.
   NOTE(review): UNION (not UNION ALL) deduplicates across the three
   branches; if the sources are disjoint, UNION ALL would be cheaper. #}
{{ config(
materialized = 'table'
) }}
-- Hand-curated labels from the crosschain labels table.
SELECT
blockchain,
creator,
address,
label_type,
label_subtype,
project_name AS label,
address_name AS address_name,
NULL AS raw_metadata
FROM
{{ source(
'crosschain',
'address_labels'
) }}
WHERE
blockchain = 'evmos'
UNION
-- Token contracts from the dim_tokens model.
SELECT
blockchain,
creator,
address,
label_type,
label_subtype,
project_name AS label,
label AS address_name,
raw_metadata
FROM
{{ ref('core__dim_tokens') }}
WHERE
blockchain = 'evmos'
UNION
-- Validators from the fact_validators model.
SELECT
blockchain,
creator,
address,
label_type,
label_subtype,
project_name AS label,
label AS address_name,
NULL AS raw_metadata
FROM
{{ ref('core__fact_validators') }}
WHERE
blockchain = 'evmos'

View File

@ -0,0 +1,33 @@
version: 2
models:
- name: core__dim_labels
description: A hand curated table containing address names / labels for popular contracts, validators, tokens, etc.
columns:
- name: BLOCKCHAIN
description: The name of the blockchain
tests:
- not_null
- name: CREATOR
description: The name of the creator of the label
tests:
- not_null
- name: LABEL_TYPE
description: A high-level category describing the addresses main function or ownership
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_set:
value_set: ['flotsam', 'nft', 'defi', 'dex', 'cex', 'dapp', 'token', 'operator', 'layer2', 'chadmin']
- name: LABEL_SUBTYPE
description: A sub-category nested within label type providing further detail
tests:
- not_null
- name: LABEL
description: Name of the controlling entity of the address
tests:
- not_null
- name: ADDRESS
description: Address that the label is for. This is the field that should be used to join other tables with labels.
tests:
- not_null
- name: RAW_METADATA
description: A field available for tokens that contains decimal information

View File

@ -0,0 +1,26 @@
{# Token metadata for Evmos assets, keyed on (address, creator,
   blockchain).
   NOTE(review): reads from source('osmo', 'asset_metadata') -- an
   Osmosis-named source feeding an Evmos model; presumably shared
   Cosmos asset metadata, but confirm this is the intended source. #}
{{ config(
materialized = 'table'
) }}
SELECT
'evmos' AS blockchain,
address,
creator,
label_type,
label_subtype,
label,
project_name,
alias,
DECIMAL,
raw_metadata,
-- Surrogate key for downstream joins/uniqueness tests.
concat_ws(
'-',
address,
creator,
blockchain
) AS unique_key
FROM
{{ source(
'osmo',
'asset_metadata'
) }}

View File

@ -0,0 +1,49 @@
version: 2
models:
- name: core__dim_tokens
description: Contains token metadata for assets on the Evmos blockchain. This table was sourced from an API and may not contain every token.
columns:
- name: ADDRESS
description: "{{ doc('address') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: BLOCKCHAIN
description: "{{ doc('blockchain') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: CREATOR
description: "{{ doc('creator') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: LABEL_TYPE
description: "{{ doc('label_type') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: LABEL_SUBTYPE
description: "{{ doc('label_subtype') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: LABEL
description: "{{ doc('label') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: PROJECT_NAME
description: "{{ doc('project_name') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: ALIAS
description: A secondary address for the token, where available
tests:
- dbt_expectations.expect_column_to_exist
- name: DECIMAL
description: "{{ doc('decimal') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: RAW_METADATA
description: "{{ doc('raw_metadata') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: UNIQUE_KEY
description: The unique key of the table
tests:
- dbt_expectations.expect_column_to_exist

View File

@ -0,0 +1,13 @@
{# Public view of Evmos blocks; thin projection over silver__blocks. #}
{{ config(
materialized = 'view'
) }}
SELECT
block_id,
block_timestamp,
chain_id,
tx_count,
proposer_address,
validator_hash
FROM
{{ ref('silver__blocks') }}

View File

@ -0,0 +1,29 @@
version: 2
models:
- name: core__fact_blocks
description: Records of all blocks that have occurred on Evmos. This schema is currently in lite mode, and contains data back to DATE.
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: CHAIN_ID
description: "{{ doc('chain_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TX_COUNT
description: "{{ doc('tx_count') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: PROPOSER_ADDRESS
description: "{{ doc('proposer_address') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: VALIDATOR_HASH
description: "{{ doc('validator_hash') }}"
tests:
- dbt_expectations.expect_column_to_exist

View File

@ -0,0 +1,21 @@
{# Public view of message attributes; msg_group is exposed as
   "group:sub_group" concatenated from the silver columns. #}
{{ config(
materialized = 'view'
) }}
SELECT
block_id,
block_timestamp,
tx_id,
tx_succeeded,
CONCAT(
msg_group,
':',
msg_sub_group
) AS msg_group,
msg_index,
msg_type,
attribute_index,
attribute_key,
attribute_value
FROM
{{ ref('silver__msg_attributes') }}

View File

@ -0,0 +1,42 @@
version: 2
models:
- name: core__fact_msg_attributes
description: Records of all message attributes associated to messages that have occurred on Evmos. This schema is currently in lite mode, and contains data back to DATE.
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TX_ID
description: "{{ doc('tx_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TX_SUCCEEDED
description: "{{ doc('tx_succeeded') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: MSG_GROUP
description: "{{ doc('msg_group') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: MSG_INDEX
description: "{{ doc('msg_index') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: MSG_TYPE
description: "{{ doc('msg_type') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: ATTRIBUTE_KEY
description: "They key from the key-value pair from the message attribute"
tests:
- dbt_expectations.expect_column_to_exist
- name: ATTRIBUTE_VALUE
description: "They value from the key-value pair from the message attribute"
tests:
- dbt_expectations.expect_column_to_exist

View File

@ -0,0 +1,19 @@
{# Public view of messages; msg_group is exposed as
   "group:sub_group" concatenated from the silver columns. #}
{{ config(
materialized = 'view'
) }}
SELECT
block_id,
block_timestamp,
tx_id,
tx_succeeded,
CONCAT(
msg_group,
':',
msg_sub_group
) AS msg_group,
msg_index,
msg_type,
msg
FROM
{{ ref('silver__msgs') }}

View File

@ -0,0 +1,38 @@
version: 2
models:
- name: core__fact_msgs
description: Records of all message attributes associated to messages that have occurred on Evmos. This schema is currently in lite mode, and contains data back to DATE.
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TX_ID
description: "{{ doc('tx_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TX_SUCCEEDED
description: "{{ doc('tx_succeeded') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: MSG_GROUP
description: "{{ doc('msg_group') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: MSG_INDEX
description: "{{ doc('msg_index') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: MSG_TYPE
description: "{{ doc('msg_type') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: MSG
description: "A block of json that contains the message attributes in base64 encoding."
tests:
- dbt_expectations.expect_column_to_exist

View File

@ -0,0 +1,74 @@
{# Transaction-level fact: one row per tx_id, enriched with the fee and
   sender pulled from message attributes. #}
{{ config(
materialized = 'incremental',
unique_key = "tx_id",
incremental_strategy = 'merge',
cluster_by = ['block_timestamp::DATE'],
) }}
-- First 'fee' attribute per transaction (lowest msg_index wins).
WITH fee AS (
SELECT
tx_id,
attribute_value AS fee
FROM
{{ ref('silver__msg_attributes') }}
WHERE
attribute_key = 'fee'
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= CURRENT_DATE -2
{% endif %}
qualify(ROW_NUMBER() over(PARTITION BY tx_id
ORDER BY
msg_index)) = 1
),
-- Sender: account part of the acc_seq attribute, or the transfer
-- message's 'sender' attribute.
spender AS (
SELECT
tx_id,
SPLIT_PART(
attribute_value,
'/',
0
) AS tx_from
FROM
{{ ref('silver__msg_attributes') }}
WHERE
-- Fix: OR/AND precedence -- without these parentheses the incremental
-- filter below applied only to the transfer/sender disjunct.
(
attribute_key = 'acc_seq'
OR (msg_type = 'transfer'
AND attribute_key = 'sender')
)
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= CURRENT_DATE -2
{% endif %}
qualify(ROW_NUMBER() over(PARTITION BY tx_id
ORDER BY
msg_index)) = 1
)
SELECT
t.block_id,
t.block_timestamp,
t.tx_id,
s.tx_from,
tx_succeeded,
codespace,
-- Transactions without a fee attribute default to zero aevmos.
COALESCE(
fee,
'0aevmos'
) AS fee,
gas_used,
gas_wanted,
-- Fix: missing comma previously emitted `tx_code AS msgs`, silently
-- dropping the real msgs column documented in the schema yml.
tx_code,
msgs
FROM
{{ ref('silver__transactions') }}
t
LEFT OUTER JOIN fee f
ON t.tx_id = f.tx_id
LEFT OUTER JOIN spender s
ON t.tx_id = s.tx_id
{% if is_incremental() %}
WHERE
_inserted_timestamp :: DATE >= CURRENT_DATE -2
{% endif %}

View File

@ -0,0 +1,50 @@
version: 2
models:
- name: core__fact_transactions
description: Records of all transactions that have occurred on Evmos. This schema is currently in lite mode, and contains data back to DATE.
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_ID
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TX_ID
description: "{{ doc('tx_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TX_FROM
description: "{{ doc('tx_from') }}"
tests:
- not_null:
where: tx_succeeded = 'TRUE'
- name: TX_SUCCEEDED
description: "{{ doc('tx_succeeded') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: CODESPACE
description: "{{ doc('codespace') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: FEE
description: "{{ doc('fee') }}"
tests:
- not_null
- name: GAS_USED
description: "{{ doc('gas_used') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: GAS_WANTED
description: "{{ doc('gas_wanted') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: MSGS
description: "The underlying json from the messages or events within the transactions"
tests:
- dbt_expectations.expect_column_to_exist

View File

@ -0,0 +1,16 @@
{# Public view of transfers; thin projection over silver__transfers. #}
{{ config(
materialized = 'view'
) }}
SELECT
block_id,
block_timestamp,
tx_id,
tx_succeeded,
transfer_type,
sender,
amount,
currency,
receiver
FROM
{{ ref('silver__transfers') }}

View File

@ -0,0 +1,41 @@
version: 2
models:
- name: core__fact_transfers
description: Records of all transfers on Evmos. This schema is currently in lite mode and contains data back to DATE.
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TX_ID
description: "{{ doc('tx_id') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TX_SUCCEEDED
description: "{{ doc('tx_succeeded') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: TRANSFER_TYPE
description: "{{ doc('transfer_type') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: SENDER
description: "{{ doc('sender') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: AMOUNT
description: "{{ doc('amount') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: CURRENCY
description: "{{ doc('currency') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: RECEIVER
description: "{{ doc('receiver') }}"
tests:
- dbt_expectations.expect_column_to_exist

View File

@ -0,0 +1,22 @@
{{ config(
materialized = 'view'
) }}
SELECT
address,
blockchain,
creator,
label_type,
label_subtype,
label,
project_name,
delegator_shares,
jailed,
rate,
max_change_rate,
max_rate,
min_self_delegation,
RANK,
raw_metadata
FROM
{{ ref('silver__validators') }}

View File

@ -0,0 +1,69 @@
version: 2
models:
- name: core__fact_validators
description: Information about all active and inactive set validators on Evmos.
columns:
- name: ADDRESS
description: "{{ doc('address') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: BLOCKCHAIN
description: "{{ doc('blockchain') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: CREATOR
description: "{{ doc('creator') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: LABEL_TYPE
description: "{{ doc('label_type') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: LABEL_SUBTYPE
description: "{{ doc('label_subtype') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: LABEL
description: "{{ doc('label') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: PROJECT_NAME
description: "{{ doc('project_name') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: DELEGATOR_SHARES
description: The number of tokens staked to the validator.
tests:
- dbt_expectations.expect_column_to_exist
- name: JAILED
description: FALSE when a validator is not jailed, TRUE when a validator is jailed
tests:
- dbt_expectations.expect_column_to_exist
- name: RATE
description: The current commission rate the validator is charging stakers.
tests:
- dbt_expectations.expect_column_to_exist
- name: MAX_CHANGE_RATE
description: The maximum rate at which a validator can change their commission per day.
tests:
- dbt_expectations.expect_column_to_exist
- name: MAX_RATE
description: The maximum commission rate that the validator can charge
tests:
- dbt_expectations.expect_column_to_exist
- name: MIN_SELF_DELEGATION
description: The minimum number of Evmos tokens that the operator must be staking with their own validator
tests:
- dbt_expectations.expect_column_to_exist
- name: RANK
description: The rank of the validator in the validator set. Rank is determined by the number of Evmos tokens staked to the validator
tests:
- dbt_expectations.expect_column_to_exist
- name: RAW_METADATA
description: "{{ doc('raw_metadata') }}"
tests:
- dbt_expectations.expect_column_to_exist
- name: UNIQUE_KEY
description: The unique key of the table
tests:
- dbt_expectations.expect_column_to_exist

View File

@ -1,10 +1,10 @@
{% docs __overview__ %}
# Welcome to the Flipside Crypto xyz Models Documentation
# Welcome to the Flipside Crypto EVMOS Models Documentation
## **What does this documentation cover?**
The documentation included here details the design of the xyz
tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/flipsideCrypto/xyz-models/)
The documentation included here details the design of the evmos
tables and views available via [Flipside Crypto.](https://flipsidecrypto.xyz/) For more information on how these models are built, please see [the github repository.](https://github.com/flipsideCrypto/evmos-models/)
## **How do I use these docs?**
The easiest way to navigate this documentation is to use the Quick Links below. These links will take you to the documentation for each table, which contains a description, a list of the columns, and other helpful information.
@ -17,7 +17,7 @@ There is more information on how to use dbt docs in the last section of this doc
**Click on the links below to jump to the documentation for each schema.**
### Core Tables (`xyz`.`CORE`.`<table_name>`)
### Core Tables (`evmos`.`CORE`.`<table_name>`)
**Dimension Tables:**
@ -33,8 +33,7 @@ There is more information on how to use dbt docs in the last section of this doc
## **Data Model Overview**
The xyz
models are built a few different ways, but the core fact tables are built using three layers of sql models: **bronze, silver, and gold (or core).**
The evmos models are built a few different ways, but the core fact tables are built using three layers of sql models: **bronze, silver, and gold (or core).**
- Bronze: Data is loaded in from the source as a view
- Silver: All necessary parsing, filtering, de-duping, and other transformations are done here
@ -68,7 +67,7 @@ Note that you can also right-click on models to interactively filter and explore
- [Flipside](https://flipsidecrypto.xyz/)
- [Velocity](https://app.flipsidecrypto.com/velocity?nav=Discover)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/xyz-models)
- [Github](https://github.com/FlipsideCrypto/evmos-models)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs _unique_key %}
The unique key for the table.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs address %}
Address unique to an individual wallet, validator, or token.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs amount %}
The amount that was used in the transaction message.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs block_id %}
The block height the block was recorded at.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs block_timestamp %}
The date and time at which the block began.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs blockchain %}
In this table, always Evmos. Used to join to cross-chain tables.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs chain_id %}
The name and version of the blockchain.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs codespace %}
Namespace for the code.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs creator %}
Name of the label creator - for now, this will always be "Flipside."
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs currency %}
The currency that was used in the transaction message.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs decimal %}
Divide the amount by POW(10, decimal) to get the amount used in the transaction. This value can be NULL, as decimals are hand curated from an outside source and not found on-chain.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs fee %}
The fee is paid by the initiator of the transaction. Fee = gas * gas price and is given in aevmos, the smallest denomination of EVMOS.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs gas_used %}
The amount of gas consumed by the transaction.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs gas_wanted %}
Amount of gas requested for a transaction. It is provided by users when the transaction is generated.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs inserted_timestamp %}
The date and time at which the block or transaction was inserted in the bronze tables.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs label %}
The label or name of the address.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs label_subtype %}
Adds more detail to the label type.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs label_type %}
A broad category that describes what a label is representing.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs msg_group %}
Value grouping different messages together to represent a single action. Format will include the numeric msg_group and msg_sub_group with a ":" seperator. The subgroup will always be 0 except for "Exec" actions. NULL group means messages are related to the header (overall transaction)
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs msg_index %}
Short for "message index," the position in which messages occur in a transaction.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs msg_sub_group %}
Silver only -- Numeric value grouping different messages together to represent a single action within a group. This is relevent for exec actions that contain mutiple underlying actions. NULL sub group means messages are related to the header (overall transaction)
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs msg_type %}
A string containing information about the type of message occurring.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs project_name %}
The name of the project the label belongs to.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs proposer_address %}
The address of the validator that proposed the block.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs raw_metadata %}
Additional details about the validator or token in json format.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs receiver %}
The wallet address of the individual who received tokens in the transfer.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs sender %}
The wallet address of the individual who sent tokens in the transfer.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs transfer_type %}
Details on the type of transfer occurring during the transaction. "IBC_TRANSFER_IN" = depositing tokens onto Evmos via IBC. "IBC_TRANSFER_OUT" = withdrawing tokens from Evmos via IBC. "EVMOS" = wallet to wallet transfer on Evmos.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs tx_code %}
A number that corresponds to various error codes. When "0", the transaction is successful. Non-zero numbers signify different types of transaction failures.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs tx_count %}
The number of transactions that occurred during a block.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs tx_from %}
The wallet address of the individual who initiated the transaction
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs tx_id %}
A unique key that identifies a transaction. Called "TxHash" on block explorers.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs tx_log %}
A string that contains the transaction logs, which are logs written by the program interacted with during the transaction.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs tx_succeeded %}
Transaction status is "TRUE" if the transaction went through, "FALSE" if the transaction failed.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs validator_hash %}
The root hash of the new validator set.
{% enddocs %}

View File

@ -0,0 +1,48 @@
{{ config(
    materialized = 'incremental',
    unique_key = "CONCAT_WS('-', chain_id, block_id)",
    incremental_strategy = 'delete+insert',
    cluster_by = ['block_timestamp::DATE'],
) }}
-- One row per block, parsed from raw tendermint block headers. The bronze
-- payload arrives either as a bare JSON object or wrapped in a one-element
-- array, so every field is read via COALESCE of both shapes.
SELECT
    block_id,
    COALESCE(
        DATA [0] :result :block :header :time :: TIMESTAMP,
        DATA :result :block :header :time :: TIMESTAMP
    ) AS block_timestamp,
    COALESCE(
        DATA [0] :result :block :header :chain_id :: STRING,
        DATA :result :block :header :chain_id :: STRING
    ) AS chain_id,
    ARRAY_SIZE(
        COALESCE(
            data [0] :result :block :data :txs,
            data :result :block :data :txs
        )
    ) AS tx_count,
    COALESCE(
        data [0] :result :block :header :proposer_address :: STRING,
        data :result :block :header :proposer_address :: STRING
    ) AS proposer_address,
    COALESCE(
        DATA [0] :result :block :header :validators_hash :: STRING,
        DATA :result :block :header :validators_hash :: STRING
    ) AS validator_hash,
    _inserted_timestamp :: TIMESTAMP AS _inserted_timestamp
FROM
    {{ ref('bronze__tendermint_blocks') }}
WHERE
    -- Exclude error responses, whichever shape the payload takes
    data [0] :error IS NULL
    AND DATA :error IS NULL
    -- BUGFIX: require chain_id in AT LEAST ONE payload shape. The original
    -- required it in BOTH shapes simultaneously, which can never be true
    -- (an array payload yields NULL for object-path access and vice versa),
    -- so the model filtered out every row.
    AND COALESCE(
        DATA [0] :result :block :header :chain_id :: STRING,
        DATA :result :block :header :chain_id :: STRING
    ) IS NOT NULL

{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
    SELECT
        MAX(_inserted_timestamp) :: DATE - 2
    FROM
        {{ this }}
)
{% endif %}

View File

@ -0,0 +1,70 @@
version: 2
models:
- name: silver__blocks
description: Records of all blocks that have occurred on Evmos, dating back to the genesis block.
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- CHAIN_ID
- BLOCK_ID
- sequence_gaps:
column_name: BLOCK_ID
where: BLOCK_TIMESTAMP <= CURRENT_DATE - 1
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: CHAIN_ID
description: "{{ doc('chain_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_COUNT
description: "{{ doc('tx_count') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: PROPOSER_ADDRESS
description: "{{ doc('proposer_address') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: VALIDATOR_HASH
description: "{{ doc('validator_hash') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: _INSERTED_TIMESTAMP
description: "{{ doc('inserted_timestamp') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ

View File

@ -0,0 +1,41 @@
{{ config(
    materialized = 'incremental',
    unique_key = "CONCAT_WS('-', tx_id, msg_index, attribute_index)",
    incremental_strategy = 'delete+insert',
    cluster_by = ['block_timestamp::DATE'],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
) }}
-- One row per attribute of every message: explodes the 'attributes' array
-- of silver__msgs via LATERAL FLATTEN. Attribute keys/values arrive
-- base64-encoded; TRY_BASE64_DECODE_STRING returns NULL (rather than
-- erroring) for values that are not valid base64.
SELECT
    block_id,
    block_timestamp,
    tx_id,
    tx_succeeded,
    msg_group,
    msg_sub_group,
    msg_index,
    msg_type,
    -- position of the attribute within the message's attributes array
    b.index AS attribute_index,
    TRY_BASE64_DECODE_STRING(
        b.value :key :: STRING
    ) AS attribute_key,
    TRY_BASE64_DECODE_STRING(
        b.value :value :: STRING
    ) AS attribute_value,
    _inserted_timestamp
FROM
    {{ ref('silver__msgs') }} A,
    LATERAL FLATTEN(
        input => A.msg,
        path => 'attributes'
    ) b

{% if is_incremental() %}
WHERE
    -- 2-day lookback against the latest load in the target table
    _inserted_timestamp :: DATE >= (
        SELECT
            MAX(_inserted_timestamp) :: DATE - 2
        FROM
            {{ this }}
    )
{% endif %}

View File

@ -0,0 +1,70 @@
version: 2
models:
- name: silver__msg_attributes
description: Records of all messages associated to transactions that have occurred on Evmos, dating back to the genesis block.
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_ID
- MSG_INDEX
- ATTRIBUTE_INDEX
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_ID
description: "{{ doc('tx_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: MSG_GROUP
description: "{{ doc('msg_group') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MSG_SUB_GROUP
description: "{{ doc('msg_sub_group') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MSG_INDEX
description: "{{ doc('msg_index') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MSG_TYPE
description: "{{ doc('msg_type') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: MSG
description: "The underlying json from the message or event within the transactions"

View File

@ -0,0 +1,134 @@
{{ config(
    materialized = 'incremental',
    unique_key = "CONCAT_WS('-', tx_id, msg_index)",
    incremental_strategy = 'delete+insert',
    cluster_by = ['block_timestamp::DATE'],
) }}
-- One row per message within each transaction. Messages are flattened out
-- of silver__transactions.msgs, then clustered into logical actions
-- (msg_group) and, for authz "exec" actions, into sub-actions
-- (msg_sub_group) so that multi-message actions can be stitched back
-- together downstream.
WITH base AS (
    -- Flatten the msgs array. The first attribute of each message is
    -- decoded to detect an 'action' marker (starts a new msg_group) and a
    -- 'module' marker (used below to bound exec sub-groups).
    SELECT
        t.block_id,
        t.block_timestamp,
        t.tx_id,
        t.gas_used,
        t.gas_wanted,
        t.tx_succeeded,
        f.value AS msg,
        f.index :: INT AS msg_index,
        msg :type :: STRING AS msg_type,
        IFF(
            TRY_BASE64_DECODE_STRING(
                msg :attributes [0] :key :: STRING
            ) = 'action',
            TRUE,
            FALSE
        ) AS is_action,
        -- conditional_true_event numbers messages by how many 'action'
        -- markers precede them; -1 / NULLIF maps header-level messages
        -- (those before the first action) to NULL
        NULLIF(
            (conditional_true_event(is_action) over (PARTITION BY tx_id
        ORDER BY
            msg_index ASC) -1),
            -1
        ) AS msg_group,
        IFF(
            TRY_BASE64_DECODE_STRING(
                msg :attributes [0] :key :: STRING
            ) = 'module',
            TRUE,
            FALSE
        ) AS is_module,
        TRY_BASE64_DECODE_STRING(
            msg :attributes [0] :key :: STRING
        ) AS attribute_key,
        TRY_BASE64_DECODE_STRING(
            msg :attributes [0] :value :: STRING
        ) AS attribute_value,
        t._inserted_timestamp
    FROM
        {{ ref('silver__transactions') }} t,
        LATERAL FLATTEN(input => msgs) f

{% if is_incremental() %}
WHERE
    _inserted_timestamp :: DATE >= (
        SELECT
            MAX(_inserted_timestamp) :: DATE - 2
        FROM
            {{ this }}
    )
{% endif %}
),
exec_actions AS (
    -- tx/group pairs whose action is an authz exec (these carry nested
    -- sub-actions that need sub-grouping)
    SELECT
        DISTINCT tx_id,
        msg_group
    FROM
        base
    WHERE
        msg_type = 'message'
        AND attribute_key = 'action'
        AND LOWER(attribute_value) LIKE '%exec%'
),
GROUPING AS (
    -- Within each exec group, number the 'module' marker messages 0,1,2,...
    -- each marker opens a new sub-action
    SELECT
        base.tx_id,
        base.msg_index,
        RANK() over(
            PARTITION BY base.tx_id,
            base.msg_group
            ORDER BY
                base.msg_index
        ) -1 AS msg_sub_group
    FROM
        base
        INNER JOIN exec_actions e
        ON base.tx_id = e.tx_id
        AND base.msg_group = e.msg_group
    WHERE
        base.is_module = 'TRUE'
        AND base.msg_type = 'message'
),
FINAL AS (
    -- Back-fill each message's sub-group from the nearest following marker
    -- (LAST_VALUE ... DESC rows unbounded preceding scans forward);
    -- non-exec groups default to 0, header messages stay NULL
    SELECT
        block_id,
        block_timestamp,
        A.tx_id,
        tx_succeeded,
        msg_group,
        CASE
            WHEN msg_group IS NULL THEN NULL
            ELSE COALESCE(
                LAST_VALUE(
                    b.msg_sub_group ignore nulls
                ) over(
                    PARTITION BY A.tx_id,
                    msg_group
                    ORDER BY
                        A.msg_index DESC rows unbounded preceding
                ),
                0
            )
        END AS msg_sub_group,
        A.msg_index,
        msg_type,
        msg,
        _inserted_timestamp
    FROM
        base A
        LEFT JOIN GROUPING b
        ON A.tx_id = b.tx_id
        AND A.msg_index = b.msg_index
)
SELECT
    block_id,
    block_timestamp,
    tx_id,
    tx_succeeded,
    msg_group,
    msg_sub_group,
    msg_index,
    msg_type,
    msg,
    _inserted_timestamp
FROM
    FINAL

View File

@ -0,0 +1,76 @@
version: 2
models:
- name: silver__msgs
description: Records of all messages associated to transactions that have occurred on Evmos, dating back to the genesis block.
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_ID
- MSG_INDEX
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_ID
description: "{{ doc('tx_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_SUCCEEDED
description: "{{ doc('tx_succeeded') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- BOOLEAN
- name: MSG_GROUP
description: "{{ doc('msg_group') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MSG_SUB_GROUP
description: "{{ doc('msg_sub_group') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MSG_INDEX
description: "{{ doc('msg_index') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MSG_TYPE
description: "{{ doc('msg_type') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: MSG
description: "The underlying json from the message or event within the transactions"

View File

@ -0,0 +1,75 @@
{{ config(
    materialized = 'incremental',
    unique_key = "tx_id",
    incremental_strategy = 'delete+insert',
    cluster_by = 'block_timestamp::DATE',
) }}
-- One row per transaction, parsed from the raw tendermint tx payload and
-- joined to silver__blocks for the block timestamp.
WITH base_transactions AS (
    SELECT
        block_id,
        t.value :hash :: STRING AS tx_id,
        t.value :tx_result :codespace AS codespace,
        t.value :tx_result :gas_used :: NUMBER AS gas_used,
        t.value :tx_result :gas_wanted :: NUMBER AS gas_wanted,
        -- tx_result.code = 0 means success; any non-zero code is a failure
        CASE
            WHEN t.value :tx_result :code :: NUMBER = 0 THEN TRUE
            ELSE FALSE
        END AS tx_succeeded,
        t.value :tx_result :code :: NUMBER AS tx_code,
        t.value :tx_result :events AS msgs,
        t.value :tx_result :log :: STRING AS tx_log,
        _inserted_timestamp
    FROM
        {{ ref('bronze__tendermint_transactions') }},
        TABLE(FLATTEN(DATA :result :txs)) t

{% if is_incremental() %}
WHERE
    _inserted_timestamp :: DATE >= (
        SELECT
            MAX(_inserted_timestamp) :: DATE - 2
        FROM
            {{ this }}
    )
{% endif %}
)
SELECT
    t.block_id,
    b.block_timestamp,
    tx_id,
    codespace,
    gas_used,
    gas_wanted,
    tx_succeeded,
    tx_code,
    msgs,
    tx_log,
    t._inserted_timestamp,
    concat_ws(
        '-',
        t.block_id,
        tx_id
    ) AS unique_key
FROM
    base_transactions t
    LEFT OUTER JOIN {{ ref('silver__blocks') }}
    b
    ON t.block_id = b.block_id

{% if is_incremental() %}
WHERE
    -- NOTE(review): filtering on b._inserted_timestamp in WHERE converts
    -- this LEFT OUTER JOIN into an inner join during incremental runs, and
    -- drops transactions whose block row was inserted earlier than the
    -- 2-day lookback — confirm whether that is intended or whether the
    -- filter should move into the ON clause / allow NULLs.
    b._inserted_timestamp :: DATE >= (
        SELECT
            MAX(_inserted_timestamp) :: DATE - 2
        FROM
            {{ this }}
    )
{% endif %}
-- Keep the latest-loaded record per tx_id
qualify ROW_NUMBER() over (
    PARTITION BY tx_id
    ORDER BY
        t._inserted_timestamp DESC
) = 1

View File

@ -0,0 +1,87 @@
version: 2
models:
- name: silver__transactions
description: Records of all transactions that have occurred on Evmos, dating back to the genesis block.
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_ID
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_ID
description: "{{ doc('tx_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: CODESPACE
description: "{{ doc('codespace') }}"
- name: GAS_USED
description: "{{ doc('gas_used') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: GAS_WANTED
description: "{{ doc('gas_wanted') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TX_SUCCEEDED
description: "{{ doc('tx_succeeded') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- BOOLEAN
- name: TX_CODE
description: "{{ doc('tx_code') }}"
tests:
- not_null:
where: TX_SUCCEEDED
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MSGS
description: "The underlying json from the messages or events within the transactions"
tests:
- not_null
- name: TX_TYPE
description: "The transaction type"
- name: TX_LOG
description: "{{ doc('tx_log') }}"
tests:
- not_null:
where: TX_SUCCEEDED
- name: _INSERTED_TIMESTAMP
description: "{{ doc('inserted_timestamp') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ

View File

@ -0,0 +1,362 @@
{{ config(
materialized = 'incremental',
unique_key = "CONCAT_WS('-', tx_id, block_id, msg_index, currency)",
incremental_strategy = 'delete+insert',
cluster_by = 'block_timestamp::DATE',
) }}
WITH evmos_txs AS (
SELECT
DISTINCT tx_id
FROM
{{ ref('silver__msg_attributes') }}
WHERE
attribute_value IN (
'/cosmos.bank.v1beta1.MsgSend',
'/cosmos.bank.v1beta1.MsgMultiSend',
'/ibc.applications.transfer.v1.MsgTransfer'
)
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 2
FROM
{{ this }}
)
{% endif %}
),
sender_index AS (
SELECT
tx_id,
MIN(msg_index) AS msg_index
FROM
{{ ref('silver__msg_attributes') }}
WHERE
msg_type = 'tx'
AND attribute_key = 'acc_seq'
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 2
FROM
{{ this }}
)
{% endif %}
GROUP BY
tx_id
),
sender AS (
SELECT
m.block_id,
m.tx_id,
s.msg_index,
SPLIT_PART(
attribute_value,
'/',
0
) AS sender
FROM
{{ ref('silver__msg_attributes') }}
m
INNER JOIN sender_index s
ON m.tx_id = s.tx_id
AND m.msg_index = s.msg_index
WHERE
msg_type = 'tx'
AND attribute_key = 'acc_seq'
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 2
FROM
{{ this }}
)
{% endif %}
),
msg_index AS (
SELECT
m.block_id,
v.tx_id,
attribute_key,
m.msg_index
FROM
evmos_txs v
LEFT OUTER JOIN {{ ref('silver__msg_attributes') }}
m
ON v.tx_id = m.tx_id
INNER JOIN sender s
ON v.tx_id = s.tx_id
AND m.block_id = s.block_id
WHERE
msg_type = 'transfer'
AND attribute_key = 'amount'
AND m.msg_index > s.msg_index
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 2
FROM
{{ this }}
)
{% endif %}
),
receiver AS (
    -- Recipient address for each transfer event occurring after the
    -- fee-payer (acc_seq) message of the transaction.
    SELECT
        m.block_id,
        v.tx_id,
        m.msg_index,
        attribute_value AS receiver
    FROM
        evmos_txs v
        LEFT OUTER JOIN {{ ref('silver__msg_attributes') }}
        m
        ON v.tx_id = m.tx_id
        INNER JOIN sender s
        ON v.tx_id = s.tx_id
        AND m.block_id = s.block_id
    WHERE
        -- BUGFIX: parenthesize the OR. AND binds tighter than OR, so the
        -- original "a OR b AND c AND d" emitted EVERY attribute of every
        -- ibc_transfer event (any key, any msg_index) as a receiver row,
        -- instead of only 'recipient' attributes.
        (
            msg_type = 'ibc_transfer'
            OR msg_type = 'transfer'
        )
        AND attribute_key = 'recipient'
        AND m.msg_index > s.msg_index

{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
    SELECT
        MAX(_inserted_timestamp) :: DATE - 2
    FROM
        {{ this }}
)
{% endif %}
),
amount AS (
SELECT
m.block_id,
v.tx_id,
m.msg_index,
COALESCE(
SPLIT_PART(
TRIM(
REGEXP_REPLACE(
attribute_value,
'[^[:digit:]]',
' '
)
),
' ',
0
),
TRY_PARSE_JSON(attribute_value) :amount
) AS amount,
COALESCE(
RIGHT(attribute_value, LENGTH(attribute_value) - LENGTH(SPLIT_PART(TRIM(REGEXP_REPLACE(attribute_value, '[^[:digit:]]', ' ')), ' ', 0))),
TRY_PARSE_JSON(attribute_value) [1] :denom
) AS currency
FROM
evmos_txs v
LEFT OUTER JOIN {{ ref('silver__msg_attributes') }}
m
ON v.tx_id = m.tx_id
INNER JOIN sender s
ON v.tx_id = s.tx_id
AND m.block_id = s.block_id
WHERE
msg_type = 'transfer'
AND attribute_key = 'amount'
AND m.msg_index > s.msg_index
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 2
FROM
{{ this }}
)
{% endif %}
),
evmos_txs_final AS (
SELECT
r.block_id,
block_timestamp,
r.tx_id,
tx_succeeded,
'EVMOS' AS transfer_type,
r.msg_index,
sender,
amount,
currency,
receiver,
_inserted_timestamp
FROM
receiver r
LEFT OUTER JOIN amount C
ON r.tx_id = C.tx_id
AND r.block_id = C.block_id
AND r.msg_index = C.msg_index
LEFT OUTER JOIN sender s
ON r.tx_id = s.tx_id
AND r.block_id = s.block_id
LEFT OUTER JOIN {{ ref('silver__transactions') }}
t
ON r.tx_id = t.tx_id
AND r.block_id = t.block_id
WHERE
amount IS NOT NULL
AND sender IS NOT NULL
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 2
FROM
{{ this }}
)
{% endif %}
),
ibc_in_tx AS (
SELECT
block_id,
block_timestamp,
tx_id,
tx_succeeded,
msg_index,
'IBC_TRANSFER_IN' AS transfer_type,
TRY_PARSE_JSON(attribute_value) :sender :: STRING AS sender,
TRY_PARSE_JSON(attribute_value) :amount :: INT AS amount,
CASE
WHEN TRY_PARSE_JSON(attribute_value) :denom :: STRING LIKE '%/%' THEN SPLIT(TRY_PARSE_JSON(attribute_value) :denom :: STRING, '/') [array_size(split(try_parse_json(attribute_value):denom::string, '/')) - 1]
ELSE TRY_PARSE_JSON(attribute_value) :denom :: STRING
END AS currency,
TRY_PARSE_JSON(attribute_value) :receiver :: STRING AS receiver,
_inserted_timestamp
FROM
{{ ref('silver__msg_attributes') }}
WHERE
msg_type = 'write_acknowledgement'
AND attribute_key = 'packet_data'
AND TRY_PARSE_JSON(attribute_value): amount IS NOT NULL
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 2
FROM
{{ this }}
)
{% endif %}
),
ibc_out_txid AS (
SELECT
tx_id
FROM
{{ ref('silver__msg_attributes') }}
WHERE
msg_type = 'ibc_transfer'
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 2
FROM
{{ this }}
)
{% endif %}
),
ibc_out_tx AS (
SELECT
block_id,
block_timestamp,
tx_id,
tx_succeeded,
msg_index,
'IBC_TRANSFER_OUT' AS transfer_type,
TRY_PARSE_JSON(attribute_value) :sender :: STRING AS sender,
TRY_PARSE_JSON(attribute_value) :amount :: INT AS amount,
CASE
WHEN TRY_PARSE_JSON(attribute_value) :denom :: STRING LIKE '%/%' THEN SPLIT(TRY_PARSE_JSON(attribute_value) :denom :: STRING, '/') [array_size(split(try_parse_json(attribute_value):denom::string, '/')) - 1]
ELSE TRY_PARSE_JSON(attribute_value) :denom :: STRING
END AS currency,
TRY_PARSE_JSON(attribute_value) :receiver :: STRING AS receiver,
_inserted_timestamp
FROM
{{ ref('silver__msg_attributes') }}
WHERE
tx_id IN (
SELECT
tx_id
FROM
ibc_out_txid
)
AND msg_type = 'send_packet'
AND attribute_key = 'packet_data'
{% if is_incremental() %}
AND _inserted_timestamp :: DATE >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 2
FROM
{{ this }}
)
{% endif %}
),
ibc_transfers_agg AS (
SELECT
*
FROM
ibc_out_tx
UNION ALL
SELECT
*
FROM
ibc_in_tx
),
ibc_tx_final AS (
SELECT
i.block_id,
i.block_timestamp,
i.tx_id,
i.tx_succeeded,
i.transfer_type,
i.sender,
i.amount,
i.currency,
i.receiver,
msg_index,
_inserted_timestamp
FROM
ibc_transfers_agg i
)
SELECT
block_id,
block_timestamp,
tx_id,
tx_succeeded,
transfer_type,
sender,
amount,
currency,
receiver,
msg_index,
_inserted_timestamp
FROM
ibc_tx_final
UNION ALL
SELECT
block_id,
block_timestamp,
tx_id,
tx_succeeded,
transfer_type,
sender,
amount,
currency,
receiver,
msg_index,
_inserted_timestamp
FROM
evmos_txs_final

View File

@ -0,0 +1,92 @@
version: 2
models:
- name: silver__transfers
description: Records of all transfers on the Evmos, including IBC transfers as on- and off-ramps to Evmos and wallet to wallet transfers. This schema is in lite mode and contains data back to DATE.
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_ID
- BLOCK_ID
- MSG_INDEX
- CURRENCY
columns:
- name: BLOCK_ID
description: "{{ doc('block_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp') }}"
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_ID
description: "{{ doc('tx_id') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_SUCCEEDED
description: "{{ doc('tx_succeeded') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- BOOLEAN
- name: TRANSFER_TYPE
description: "{{ doc('transfer_type') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: SENDER
description: "{{ doc('sender') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: AMOUNT
description: "{{ doc('amount') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: CURRENCY
description: "{{ doc('currency') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: RECEIVER
description: "{{ doc('receiver') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: _INSERTED_TIMESTAMP
description: "{{ doc('inserted_timestamp') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ

View File

@ -0,0 +1,26 @@
-- Label model: one row per Evmos validator operator address, carrying
-- Flipside label fields plus commission/stake attributes parsed from the
-- validator's metadata JSON (the DATA column of the bronze LCD model).
{{ config(
materialized = 'table'
) }}
SELECT
address,
'evmos' AS blockchain,
'flipside' AS creator,
'operator' AS label_type,
'validator' AS label_subtype,
-- human-readable validator name (moniker) from the metadata JSON
DATA :description :moniker :: STRING AS label,
DATA :description :identity :: STRING AS project_name,
DATA :delegator_shares :: INT AS delegator_shares,
DATA :jailed :: BOOLEAN AS jailed,
-- commission settings as reported by the LCD metadata
DATA :commission :commission_rates :rate :: FLOAT AS rate,
DATA :commission :commission_rates :max_change_rate :: FLOAT AS max_change_rate,
DATA :commission :commission_rates :max_rate :: FLOAT AS max_rate,
DATA :min_self_delegation :: INT AS min_self_delegation,
-- NOTE(review): RANK() over a per-address partition can yield ties
-- (multiple rows with RANK = 1) when delegator_shares values repeat for
-- the same address; confirm whether ROW_NUMBER() was intended if any
-- downstream consumer filters on RANK = 1 expecting uniqueness.
RANK() over (
PARTITION BY address
ORDER BY
DATA :delegator_shares :: INT DESC
) AS RANK,
-- keep the full source JSON for downstream inspection
DATA AS raw_metadata
FROM
{{ ref('bronze_api__get_validator_metadata_lcd') }}

View File

@ -0,0 +1,139 @@
version: 2
models:
- name: silver__validators
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- ADDRESS
- CREATOR
- BLOCKCHAIN
columns:
- name: ADDRESS
description: "{{ doc('address') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: BLOCKCHAIN
description: "{{ doc('blockchain') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- dbt_expectations.expect_column_values_to_be_in_set:
value_set: ['evmos']
- name: CREATOR
description: "{{ doc('creator') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: LABEL_TYPE
description: "{{ doc('label_type') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- dbt_expectations.expect_column_values_to_be_in_set:
value_set: ['operator']
- name: LABEL_SUBTYPE
description: "{{ doc('label_subtype') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- dbt_expectations.expect_column_values_to_be_in_set:
value_set: ['validator']
- name: LABEL
description: "{{ doc('label') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: PROJECT_NAME
description: "{{ doc('project_name') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: DELEGATOR_SHARES
description: The number of tokens staked to the validator.
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: JAILED
description: FALSE when a validator is not jailed, TRUE when a validator is jailed
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- BOOLEAN
- name: RATE
description: The current commission rate the validator is charging stakers.
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MAX_CHANGE_RATE
description: The maximum rate at which a validator can change their commission per day.
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MAX_RATE
description: The maximum commission rate that the validator can charge
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: MIN_SELF_DELEGATION
description: The minimum number of Evmos tokens that the operator must stake to their own validator.
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: RANK
description: The rank of the validator in the validator set. Rank is determined by the number of Evmos tokens staked to the validator
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: RAW_METADATA
description: "{{ doc('raw_metadata') }}"
tests:
- not_null
- name: UNIQUE_KEY
description: The unique key of the table
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR

View File

@ -2,9 +2,38 @@ version: 2
sources:
- name: crosschain
database: "{{ 'crosschain' if target.database == 'evmos' else 'crosschain_dev' }}"
schema: core
tables:
- name: dim_date_hours
- name: address_tags
- name: address_labels
- name: dim_dates
- name: bronze_streamline
database: streamline
schema: |
{{ "EVMOS_DEV" if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else "EVMOS" }}
tables:
- name: eth_blocks
- name: eth_transactions
- name: tendermint_blocks
- name: tendermint_transactions
- name: tendermint_validators
- name: address_labels
- name: bronze
database: evmos
schema: bronze
tables:
- name: sample_blocks
- name: sample_txs
- name: streamline
database: streamline
schema: evmos
tables:
- name: tendermint_blocks
- name: tendermint_transactions
- name: osmo
database: osmosis
schema: silver
tables:
- name: asset_metadata

View File

@ -0,0 +1,30 @@
-- depends_on: {{ ref('bronze__streamline_eth_blocks') }}
-- Completion tracker: records each eth block id present in the Streamline
-- bronze data, deduplicated to the most recently inserted row per id.
-- Incremental runs read only bronze rows at or after the latest
-- _inserted_timestamp already loaded; full refreshes read the FR
-- (full-reload) bronze relation instead.
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
) }}
SELECT
id,
block_number,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_eth_blocks') }}
WHERE
-- >= (not >) so rows sharing the boundary timestamp are not skipped;
-- re-read duplicates are dropped by the QUALIFY below
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_FR_eth_blocks') }}
{% endif %}
-- keep only the newest row per id
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,30 @@
-- depends_on: {{ ref('bronze__streamline_eth_transactions') }}
-- Completion tracker: records each eth transaction id present in the
-- Streamline bronze data, deduplicated to the most recently inserted row
-- per id. Incremental runs read only bronze rows at or after the latest
-- _inserted_timestamp already loaded; full refreshes read the FR
-- (full-reload) bronze relation instead.
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
) }}
SELECT
id,
block_number,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_eth_transactions') }}
WHERE
-- >= (not >) so rows sharing the boundary timestamp are not skipped;
-- re-read duplicates are dropped by the QUALIFY below
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_FR_eth_transactions') }}
{% endif %}
-- keep only the newest row per id
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,30 @@
-- depends_on: {{ ref('bronze__streamline_tendermint_blocks') }}
-- Completion tracker: records each tendermint block id present in the
-- Streamline bronze data, deduplicated to the most recently inserted row
-- per id. Incremental runs read only bronze rows at or after the latest
-- _inserted_timestamp already loaded; full refreshes read the FR
-- (full-reload) bronze relation instead.
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
) }}
SELECT
id,
block_number,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_tendermint_blocks') }}
WHERE
-- >= (not >) so rows sharing the boundary timestamp are not skipped;
-- re-read duplicates are dropped by the QUALIFY below
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_FR_tendermint_blocks') }}
{% endif %}
-- keep only the newest row per id
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,30 @@
-- depends_on: {{ ref('bronze__streamline_tendermint_transactions') }}
-- Completion tracker: records each tendermint transaction id present in
-- the Streamline bronze data, deduplicated to the most recently inserted
-- row per id. Incremental runs read only bronze rows at or after the
-- latest _inserted_timestamp already loaded; full refreshes read the FR
-- (full-reload) bronze relation instead.
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
) }}
SELECT
id,
block_number,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_tendermint_transactions') }}
WHERE
-- >= (not >) so rows sharing the boundary timestamp are not skipped;
-- re-read duplicates are dropped by the QUALIFY below
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_FR_tendermint_transactions') }}
{% endif %}
-- keep only the newest row per id
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

Some files were not shown because too many files have changed in this diff Show More