Merge branch 'main' into intents-labels

Merge Main
This commit is contained in:
Jack Forgash 2025-03-18 09:04:47 -06:00
commit 5d80052e08
115 changed files with 1579 additions and 3434 deletions

View File

@ -47,7 +47,7 @@ jobs:
git checkout -B docs origin/main
- name: generate dbt docs
run: dbt docs generate -t prod
run: dbt docs generate -t prod --no-compile
- name: move files to docs directory
run: |

View File

@ -42,10 +42,8 @@ jobs:
- name: Run DBT Jobs
run: |
dbt run-operation dispatch_github_workflow --args "{'repo_name': 'streamline-snowflake', 'workflow_name': 'dbt_run_near_external_table_update', 'gb_id': '${{ secrets.GB_ID}}'}";
dbt seed;
dbt run -s tag:scheduled_core --vars "{ 'RECEIPT_MAP_LOOKBACK_HOURS': 1.25}";
dbt run -s tag:scheduled_non_core models/gold;
dbt run -s tag:scheduled_core tag:scheduled_non_core models/gold;
- name: Store logs
uses: actions/upload-artifact@v4
with:

View File

@ -42,7 +42,6 @@ jobs:
- name: Run DBT Jobs
run: |
dbt run-operation dispatch_github_workflow --args "{'repo_name': 'streamline-snowflake', 'workflow_name': 'dbt_run_near_external_table_update', 'gb_id': '${{ secrets.GB_ID}}'}";
dbt run -s tag:scheduled_non_core models/gold;
- name: Store logs

View File

@ -2,7 +2,7 @@
# and underscores. A good package name should reflect your organization's
# name or the intended use of these models
name: "near_models"
version: "1.3.0"
version: "2.0.0"
config-version: 2
# This setting configures which "profile" dbt uses for this project.
@ -66,9 +66,10 @@ vars:
MANUAL_FIX: False
OBSERV_FULL_TEST: False
DBT_FULL_TEST: False
STREAMLINE_LOAD_LOOKBACK_HOURS: 3
RECEIPT_MAP_LOOKBACK_HOURS: 6
IS_MIGRATION: False
STREAMLINE_LOAD_LOOKBACK_HOURS: 3 # todo can deprecate
RECEIPT_MAP_LOOKBACK_HOURS: 6 # todo can deprecate
IS_MIGRATION: False # todo can deprecate
NEAR_MIGRATE_ARCHIVE: False
HEAL_MODELS: []
core_folders: [
'silver/streamline',

View File

@ -1,4 +1,9 @@
{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}
{% set merge_sql = fsc_utils.get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) %}
{% if incremental_predicates[0] == "dynamic_range_predicate_custom" %}
{% set predicates = [dynamic_range_predicate_custom(source, incremental_predicates[1], "DBT_INTERNAL_DEST")] %}
{% set merge_sql = fsc_utils.get_merge_sql(target, source, unique_key, dest_columns, predicates) %}
{% else %}
{% set merge_sql = fsc_utils.get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) %}
{% endif %}
{{ return(merge_sql) }}
{% endmacro %}
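For context, a minimal sketch of how a model opts into this override (an assumed caller, not part of this diff; the columns receipt_id and block_id are hypothetical). The custom path only fires when the first element of incremental_predicates is the literal string "dynamic_range_predicate_custom" and the second element names the column to build ranges over:

-- hypothetical incremental model config
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'merge',
    unique_key = 'receipt_id',
    incremental_predicates = ["dynamic_range_predicate_custom", "block_id"]
) }}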

View File

@ -0,0 +1,117 @@
{% macro dynamic_range_predicate_custom(source, predicate_column, output_alias="") -%}
{% set supported_data_types = ["INTEGER","DATE"] %}
{% set predicate_column_data_type_query %}
SELECT typeof({{predicate_column}}::variant)
FROM {{ source }}
WHERE {{predicate_column}} IS NOT NULL
LIMIT 1;
{% endset %}
{% set predicate_column_data_type_result = run_query(predicate_column_data_type_query) %}
{% if predicate_column_data_type_result.rows|length == 0 %}
{{ return('1=1') }}
{% endif %}
{% set predicate_column_data_type = predicate_column_data_type_result.columns[0].values()[0] %}
{% if predicate_column_data_type not in supported_data_types %}
{{ exceptions.raise_compiler_error("Data type of "~ predicate_column_data_type ~" is not supported, use one of "~ supported_data_types ~" column instead") }}
{% endif %}
{% set get_start_end_query %}
SELECT
MIN(
{{ predicate_column }}
) AS full_range_start,
MAX(
{{ predicate_column }}
) AS full_range_end
FROM
{{ source }}
{% endset %}
{% set start_end_results = run_query(get_start_end_query).columns %}
{% set start_predicate_value = start_end_results[0].values()[0] %}
{% set end_predicate_value = start_end_results[1].values()[0] %}
{% set get_limits_query %}
WITH block_range AS (
{% if predicate_column_data_type == "INTEGER" %}
SELECT
SEQ4() + {{ start_predicate_value }} as predicate_value
FROM
TABLE(GENERATOR(rowcount => {{ end_predicate_value - start_predicate_value }} + 1))
{% else %}
SELECT
date_day as predicate_value
FROM
crosschain.core.dim_dates
WHERE
date_day BETWEEN '{{ start_predicate_value }}' AND '{{ end_predicate_value }}'
{% endif %}
),
partition_block_counts AS (
SELECT
b.predicate_value,
COUNT(r.{{ predicate_column }}) AS count_in_window
FROM
block_range b
LEFT OUTER JOIN {{ source }}
r
ON r.{{ predicate_column }} = b.predicate_value
GROUP BY
1
),
range_groupings AS (
SELECT
predicate_value,
count_in_window,
conditional_change_event(
count_in_window > 0
) over (
ORDER BY
predicate_value
) AS group_val
FROM
partition_block_counts
),
contiguous_ranges AS (
SELECT
MIN(predicate_value) AS start_value,
MAX(predicate_value) AS end_value
FROM
range_groupings
WHERE
count_in_window > 0
GROUP BY
group_val
),
between_stmts AS (
SELECT
CONCAT(
'{{ output_alias~"." if output_alias else "" }}',
'{{ predicate_column }} between \'',
start_value,
'\' and \'',
end_value,
'\''
) AS b
FROM
contiguous_ranges
)
SELECT
CONCAT('(', LISTAGG(b, ' OR '), ')', ' OR ', '(', '{{ output_alias~"." if output_alias else "" }}{{ predicate_column }} is null', ')')
FROM
between_stmts
{% endset %}
{% set between_stmts = run_query(get_limits_query).columns[0].values()[0] %}
{% if between_stmts != '()' %}
{% set predicate_override = between_stmts %}
{% else %}
/* fall back to a no-op predicate in case of an empty update set; the merge errors if it receives zero predicates */
{% set predicate_override = '1=1' %}
{% endif %}
{{ return(predicate_override) }}
{% endmacro %}
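For illustration, under an assumed run where the source batch covers two contiguous ranges, 100-250 and 400-410, on an INTEGER column block_id, the macro returns a predicate string shaped like:

(DBT_INTERNAL_DEST.block_id between '100' and '250' OR DBT_INTERNAL_DEST.block_id between '400' and '410') OR (DBT_INTERNAL_DEST.block_id is null)

The between clauses prune the merge scan to only the touched ranges; the trailing is-null branch keeps destination rows without a predicate value eligible for matching.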

View File

@ -1,5 +1,6 @@
{% macro partition_load_manual(
scope = 'no_buffer'
scope = 'no_buffer',
partition_field = '_partition_by_block_number'
) %}
{# if range_start and range_end not set in cli, default to earliest rpc data #}
{% set range_start = var(
@ -19,17 +20,17 @@
1
) %}
{% if scope == 'front' %}
_partition_by_block_number BETWEEN {{ range_start }} - (
{{ partition_field }} BETWEEN {{ range_start }} - (
10000 * {{ front_buffer }}
)
AND {{ range_end }}
{% elif scope == 'end' %}
_partition_by_block_number BETWEEN {{ range_start }}
{{ partition_field }} BETWEEN {{ range_start }}
AND {{ range_end }} + (
10000 * {{ end_buffer }}
) {% elif scope == 'no_buffer' %}
_partition_by_block_number BETWEEN {{ range_start }}
{{ partition_field }} BETWEEN {{ range_start }}
AND {{ range_end }}
{% else %}
TRUE
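A quick sketch of the new call shape (an assumed invocation; '_partition_by_block_id' is a hypothetical column name). A caller partitioned on a different column can now pass it explicitly:

WHERE {{ partition_load_manual('front', '_partition_by_block_id') }}

Callers that omit the second argument, like the existing {{ partition_load_manual('no_buffer') }} call sites, keep the '_partition_by_block_number' default.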

View File

@ -0,0 +1,5 @@
{% docs chunks_json %}
A JSON array of the chunk headers included in this block.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs header_json %}
A JSON containing the block header.
{% enddocs %}

View File

@ -0,0 +1,3 @@
{% docs outcome_json %}
JSON object containing the outcome of the receipt or transaction execution, including gas usage, status, and logs.
{% enddocs %}

View File

@ -0,0 +1,3 @@
{% docs receipt_json %}
JSON object containing the full receipt data including actions, predecessor, receiver, and other metadata.
{% enddocs %}

View File

@ -0,0 +1,3 @@
{% docs status_json %}
JSON object containing the status of the transaction, including success or failure information.
{% enddocs %}

View File

@ -0,0 +1,3 @@
{% docs transaction_json %}
JSON object containing the full transaction data including actions, signer, receiver, and other metadata.
{% enddocs %}

View File

@ -1,34 +0,0 @@
{{ config(
materialized = 'view',
secure = false,
meta={
'database_tags':{
'table': {
'PURPOSE': 'ATLAS'
}
}
},
tags = ['atlas']
) }}
WITH nft_data AS (
SELECT
atlas_nft_table_id AS ez_nft_contract_metrics_id,
receiver_id,
tokens,
transfers_24h,
transfers_3d,
all_transfers,
owners,
transactions,
mints,
COALESCE(inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
FROM
{{ ref('silver__atlas_nft_table') }}
)
SELECT
*
FROM
nft_data

View File

@ -1,99 +0,0 @@
version: 2
models:
- name: atlas__ez_nft_contract_metrics
description: |-
Deprecating Soon - these tables previously supported NEAR Atlas, which is no longer live. These tables will be removed on February 1, 2025.
Please cease using these tables.
tests:
- dbt_utils.recency:
datepart: days
field: inserted_timestamp
interval: 1
columns:
- name: EZ_NFT_CONTRACT_METRICS_ID
description: "{ { doc('id')}}"
tests:
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: RECEIVER_ID
description: "{ { doc('receiver_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TOKENS
description: "{{ doc('tokens_count')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: TRANSFERS_24H
description: "The count of 'nft_transfer' transactions that occurred in the last 24 hours."
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: TRANSFERS_3D
description: "The count of 'nft_transfer' transactions that occurred in the last 3 days."
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: ALL_TRANSFERS
description: "{{ doc('all_transfers')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: OWNERS
description: "{{ doc('owner_count')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: TRANSACTIONS
description: "{{ doc('tx_count')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: MINTS
description: "{{ doc('mint_count')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: INSERTED_TIMESTAMP
description: "{{ doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{ doc('modified_timestamp')}}"

View File

@ -1,33 +0,0 @@
{{ config(
materialized = 'view',
secure = false,
meta={
'database_tags':{
'table': {
'PURPOSE': 'ATLAS'
}
}
},
tags = ['atlas']
) }}
WITH nft_detailed AS (
SELECT
atlas_nft_detailed_id AS ez_nft_contract_metrics_daily_id,
DAY,
receiver_id,
tokens,
all_transfers,
owners,
transactions,
mints,
COALESCE(inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
FROM
{{ ref('silver__atlas_nft_detailed') }}
)
SELECT
*
FROM
nft_detailed

View File

@ -1,86 +0,0 @@
version: 2
models:
- name: atlas__ez_nft_contract_metrics_daily
description: |-
Deprecating Soon - these tables previously supported NEAR Atlas, which is no longer live. These tables will be removed on February 1, 2025.
Please cease using these tables.
tests:
- dbt_utils.recency:
datepart: days
field: inserted_timestamp
interval: 1
columns:
- name: EZ_NFT_CONTRACT_METRICS_DAILY_ID
description: "{{ doc('id')}}"
tests:
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: DAY
description: "{{ doc('date')}}"
tests:
- not_null
- name: RECEIVER_ID
description: "{{ doc('tx_receiver')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TOKENS
description: "{{ doc('tokens_count')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: ALL_TRANSFERS
description: "{{ doc('all_transfers')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: OWNERS
description: "{{ doc('owner_count')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: TRANSACTIONS
description: "{{ doc('tx_count')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: MINTS
description: "{{ doc('mint_count')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: INSERTED_TIMESTAMP
description: "{{ doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{ doc('modified_timestamp')}}"

View File

@ -1,21 +0,0 @@
{{ config(
materialized = 'view',
secure = false,
meta={
'database_tags':{
'table': {
'PURPOSE': 'ATLAS'
}
}
},
tags = ['atlas']
) }}
SELECT
atlas_account_created_id AS fact_accounts_created_id,
DAY,
wallets_created,
inserted_timestamp,
modified_timestamp
FROM
{{ ref('silver__atlas_accounts_created') }}

View File

@ -1,44 +0,0 @@
version: 2
models:
- name: atlas__fact_accounts_created
description: |-
Deprecating Soon - these tables previously supported NEAR Atlas, which is no longer live. These tables will be removed on February 1, 2025.
Please cease using these tables.
tests:
- dbt_utils.recency:
datepart: days
field: inserted_timestamp
interval: 1
columns:
- name: FACT_ACCOUNTS_CREATED_ID
description: "{ { doc('id')}}"
tests:
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: DAY
description: "{{ doc('date')}}"
tests:
- not_null
- name: WALLETS_CREATED
description: "{{ doc('wallets_created')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: INSERTED_TIMESTAMP
description: "{{doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{doc('modified_timestamp')}}"
- name: INVOCATION_ID
description: "{{doc('invocation_id')}}"

View File

@ -1,22 +0,0 @@
{{ config(
materialized = 'view',
meta={
'database_tags':{
'table': {
'PURPOSE': 'ATLAS'
}
}
},
tags = ['atlas']
) }}
SELECT
atlas_maa_id AS fact_maas_id,
day,
maa,
new_maas,
returning_maas,
COALESCE(inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
FROM
{{ ref('silver__atlas_maa') }}

View File

@ -1,48 +0,0 @@
version: 2
models:
- name: atlas__fact_maas
description: |-
Deprecating Soon - these tables previously supported NEAR Atlas, which is no longer live. These tables will be removed on February 1, 2025.
Please cease using these tables.
tests:
- dbt_utils.recency:
datepart: days
field: inserted_timestamp
interval: 1
columns:
- name: fact_maas_id
description: "{{ doc('id') }}"
tests:
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: day
description: "{{ doc('active_day') }}"
tests:
- not_null
- unique
- name: maa
description: "{{ doc('maa')}}"
tests:
- not_null
- name: new_maas
description: "{{ doc('new_maas') }}"
tests:
- not_null
- name: returning_maas
description: "{{ doc('returning_maas') }}"
tests:
- not_null
- name: inserted_timestamp
description: "{{ doc('inserted_timestamp') }}"
- name: modified_timestamp
description: "{{ doc('modified_timestamp') }}"

View File

@ -1,28 +0,0 @@
{{ config(
materialized = 'view',
secure = false,
meta={
'database_tags':{
'table': {
'PURPOSE': 'ATLAS'
}
}
},
tags = ['atlas']
) }}
WITH TRAILING AS (
SELECT
atlas_nft_30_trailing_id AS fact_nft_monthly_txs_id,
DAY,
txns,
COALESCE(inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
FROM
{{ ref('silver__atlas_nft_30_trailing') }}
)
SELECT
*
FROM
TRAILING

View File

@ -1,41 +0,0 @@
version: 2
models:
- name: atlas__fact_nft_monthly_txs
description: |-
Deprecating Soon - these tables previously supported NEAR Atlas, which is no longer live. These tables will be removed on February 1, 2025.
Please cease using these tables.
tests:
- dbt_utils.recency:
datepart: days
field: inserted_timestamp
interval: 1
columns:
- name: FACT_NFT_MONTHLY_TXS_ID
description: "{{ doc('id')}}"
tests:
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: DAY
description: "{{ doc('date')}}"
tests:
- not_null
- name: TXNS
description: "{{ doc('tx_count')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- INTEGER
- name: INSERTED_TIMESTAMP
description: "{{ doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{ doc('modified_timestamp')}}"

View File

@ -8,8 +8,8 @@
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_hash,receipt_id,receipt_receiver_id,receipt_signer_id,receipt_predecessor_id);",
tags = ['actions', 'curated', 'scheduled_core', 'grail']
) }}
-- depends_on: {{ ref('silver__streamline_transactions_final') }}
-- depends_on: {{ ref('silver__streamline_receipts_final') }}
-- depends_on: {{ ref('silver__transactions_final') }}
-- depends_on: {{ ref('silver__receipts_final') }}
{% if execute %}
@ -41,14 +41,14 @@
SELECT
MIN(block_timestamp) block_timestamp
FROM
{{ ref('silver__streamline_transactions_final') }} A
{{ ref('silver__transactions_final') }} A
WHERE
modified_timestamp >= '{{max_mod}}'
UNION ALL
SELECT
MIN(block_timestamp) block_timestamp
FROM
{{ ref('silver__streamline_receipts_final') }} A
{{ ref('silver__receipts_final') }} A
WHERE
modified_timestamp >= '{{max_mod}}'
)
@ -76,7 +76,7 @@ WITH transactions AS (
transaction_fee AS tx_fee,
modified_timestamp
FROM
{{ ref('silver__streamline_transactions_final') }}
{{ ref('silver__transactions_final') }}
{% if var("MANUAL_FIX") %}
WHERE
@ -92,19 +92,18 @@ receipts AS (
tx_hash,
block_id,
block_timestamp,
receipt_object_id AS receipt_id,
receipt_id,
receiver_id AS receipt_receiver_id,
signer_id AS receipt_signer_id,
receipt_actions :predecessor_id :: STRING AS receipt_predecessor_id,
receipt_json :receipt :Action :signer_id :: STRING AS receipt_signer_id,
predecessor_id AS receipt_predecessor_id,
receipt_succeeded,
gas_burnt AS receipt_gas_burnt,
status_value,
receipt_actions,
outcome_json :outcome :gas_burnt :: NUMBER AS receipt_gas_burnt,
outcome_json :outcome :status :: VARIANT AS status_value,
receipt_json,
_partition_by_block_number,
_inserted_timestamp,
modified_timestamp
FROM
{{ ref('silver__streamline_receipts_final') }}
{{ ref('silver__receipts_final') }}
{% if var("MANUAL_FIX") %}
WHERE
@ -132,9 +131,8 @@ join_data AS (
r.receipt_succeeded,
r.receipt_gas_burnt,
r.status_value,
r.receipt_actions,
r._partition_by_block_number,
r._inserted_timestamp
r.receipt_json,
r._partition_by_block_number
FROM
receipts r
LEFT JOIN transactions t
@ -184,7 +182,7 @@ flatten_actions AS (
) as receipt_status_value,
False AS is_delegated,
INDEX AS action_index,
receipt_actions :receipt :Action :gas_price :: NUMBER AS action_gas_price,
receipt_json :receipt :Action :gas_price :: NUMBER AS action_gas_price,
IFF(
VALUE = 'CreateAccount',
VALUE,
@ -247,12 +245,11 @@ flatten_actions AS (
),
action_data
) AS action_data_parsed,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
join_data,
LATERAL FLATTEN(
receipt_actions :receipt :Action :actions :: ARRAY
receipt_json :receipt :Action :actions :: ARRAY
)
),
flatten_delegated_actions AS (
@ -333,7 +330,6 @@ SELECT
action_data_parsed AS action_data,
action_gas_price,
_partition_by_block_number,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['receipt_id', 'action_index']
) }} AS actions_id,

View File

@ -39,9 +39,9 @@
{% if not min_block_timestamp_day or min_block_timestamp_day == 'None' %}
{% set min_block_timestamp_day = '2020-07-01' %}
{% endif %}
{{ log("min_block_timestamp_day: " ~ min_block_timestamp_day, info=True) }}
{% endif %}
{{ log("min_block_timestamp_day: " ~ min_block_timestamp_day, info=True) }}
WITH hourly_prices AS (
SELECT

View File

@ -1,38 +0,0 @@
{{ config(
materialized = 'view',
secure = false,
tags = ['core']
) }}
WITH actions AS (
SELECT
*
FROM
{{ ref('silver__actions_events_s3') }}
)
SELECT
action_id,
tx_hash,
receipt_object_id AS receipt_id,
predecessor_id,
receiver_id,
signer_id,
block_id,
block_timestamp,
action_index,
action_name,
action_data,
logs,
receipt_succeeded,
COALESCE(
actions_events_id,
{{ dbt_utils.generate_surrogate_key(
['receipt_object_id', 'action_index']
) }}
) AS fact_actions_events_id,
receipt_object_id,
COALESCE(inserted_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
FROM
actions

View File

@ -1,130 +0,0 @@
version: 2
models:
- name: core__fact_actions_events
description: |-
Deprecating soon! Please migrate to using the new `core__ez_actions` table.
This view will be removed in Q1 2025.
tests:
- dbt_utils.recency:
datepart: hours
field: block_timestamp
interval: 2
columns:
- name: ACTION_ID
description: "{{ doc('action_id')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_HASH
description: "{{ doc('tx_hash')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: RECEIPT_ID
description: "{{ doc('receipt_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: RECEIVER_ID
description: "{{ doc('receiver_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: PREDECESSOR_ID
description: "{{ doc('predecessor_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: SIGNER_ID
description: "{{ doc('signer_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: BLOCK_ID
description: "{{ doc('block_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: ACTION_INDEX
description: "{{ doc('action_index')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: ACTION_NAME
description: "{{ doc('action_name')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: ACTION_DATA
description: "{{ doc('action_data')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- OBJECT
- VARIANT
- name: LOGS
description: "{{ doc('logs')}}"
- name: RECEIPT_SUCCEEDED
description: "{{ doc('receipt_succeeded')}}"
- name: FACT_ACTIONS_EVENTS_ID
description: "{{ doc('id')}}"
tests:
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours' AND FACT_ACTIONS_EVENTS_ID != 'cf646ad92e6df243ffabf07c47c0f2c1'
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: INSERTED_TIMESTAMP
description: "{{ doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{ doc('modified_timestamp')}}"

View File

@ -1,38 +0,0 @@
{{ config(
materialized = 'view',
secure = false,
tags = ['core']
) }}
WITH actions_events_function_call AS (
SELECT
*
FROM
{{ ref('silver__actions_events_function_call_s3') }}
)
SELECT
action_id,
tx_hash,
receiver_id,
predecessor_id,
signer_id,
block_id,
block_timestamp,
action_name,
method_name,
args,
deposit,
attached_gas,
logs,
receipt_succeeded,
COALESCE(
actions_events_function_call_id,
{{ dbt_utils.generate_surrogate_key(
['action_id']
) }}
) AS fact_actions_events_function_call_id,
COALESCE(inserted_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
FROM
actions_events_function_call

View File

@ -1,146 +0,0 @@
version: 2
models:
- name: core__fact_actions_events_function_call
description: |-
Deprecating soon! Please migrate to using the new `core__ez_actions` table. All FunctionCalls are decoded in the args element where action_name = 'FunctionCall'.
This view will be removed in Q1 2025.
tests:
- dbt_utils.recency:
datepart: hours
field: block_timestamp
interval: 2
columns:
- name: ACTION_ID
description: "{{ doc('action_id')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_HASH
description: "{{ doc('tx_hash')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: RECEIVER_ID
description: "{{ doc('receiver_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: PREDECESSOR_ID
description: "{{ doc('predecessor_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: SIGNER_ID
description: "{{ doc('signer_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: BLOCK_ID
description: "{{ doc('block_id')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: ACTION_NAME
description: "{{ doc('action_name')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: METHOD_NAME
description: "{{ doc('method_name')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: ARGS
description: "{{ doc('args')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARIANT
- OBJECT
- name: DEPOSIT
description: "{{ doc('deposit')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: ATTACHED_GAS
description: "{{ doc('attached_gas')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: LOGS
description: "{{ doc('logs')}}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- ARRAY
- VARIANT
- OBJECT
- name: RECEIPT_SUCCEEDED
description: "{{ doc('receipt_succeeded')}}"
tests:
- not_null
- name: FACT_ACTIONS_EVENTS_FUNCTION_CALL_ID
description: "{{doc('id')}}"
tests:
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: INSERTED_TIMESTAMP
description: "{{doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{doc('modified_timestamp')}}"

View File

@ -9,47 +9,42 @@ WITH blocks AS (
SELECT
*
FROM
{{ ref('silver__streamline_blocks') }}
{{ ref('silver__blocks_final') }}
)
SELECT
block_id,
block_timestamp,
block_hash,
block_author,
header,
header :challenges_result :: ARRAY AS block_challenges_result,
header :challenges_root :: STRING AS block_challenges_root,
header :chunk_headers_root :: STRING AS chunk_headers_root,
header :chunk_tx_root :: STRING AS chunk_tx_root,
header :chunk_mask :: ARRAY AS chunk_mask,
header :chunk_receipts_root :: STRING AS chunk_receipts_root,
chunks,
header :chunks_included :: NUMBER AS chunks_included,
epoch_id,
header :epoch_sync_data_hash :: STRING AS epoch_sync_data_hash,
gas_price,
header :last_ds_final_block :: STRING AS last_ds_final_block,
header :last_final_block :: STRING AS last_final_block,
latest_protocol_version,
header: next_bp_hash :: STRING AS next_bp_hash,
next_epoch_id,
header :outcome_root :: STRING AS outcome_root,
header_json AS header,
header_json :challenges_result :: ARRAY AS block_challenges_result,
header_json :challenges_root :: STRING AS block_challenges_root,
header_json :chunk_headers_root :: STRING AS chunk_headers_root,
header_json :chunk_tx_root :: STRING AS chunk_tx_root,
header_json :chunk_mask :: ARRAY AS chunk_mask,
header_json :chunk_receipts_root :: STRING AS chunk_receipts_root,
chunks_json AS chunks,
header_json :chunks_included :: NUMBER AS chunks_included,
header_json :epoch_id :: STRING AS epoch_id,
header_json :epoch_sync_data_hash :: STRING AS epoch_sync_data_hash,
header_json :gas_price :: FLOAT AS gas_price,
header_json :last_ds_final_block :: STRING AS last_ds_final_block,
header_json :last_final_block :: STRING AS last_final_block,
header_json :latest_protocol_version :: INT AS latest_protocol_version,
header_json :next_bp_hash :: STRING AS next_bp_hash,
header_json :next_epoch_id :: STRING AS next_epoch_id,
header_json :outcome_root :: STRING AS outcome_root,
prev_hash,
header :prev_height :: NUMBER AS prev_height,
header :prev_state_root :: STRING AS prev_state_root,
header :random_value :: STRING AS random_value,
header :rent_paid :: FLOAT AS rent_paid,
header :signature :: STRING AS signature,
total_supply,
validator_proposals,
validator_reward,
COALESCE(
streamline_blocks_id,
{{ dbt_utils.generate_surrogate_key(
['block_id']
) }}
) AS fact_blocks_id,
COALESCE(inserted_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
header_json :prev_height :: NUMBER AS prev_height,
header_json :prev_state_root :: STRING AS prev_state_root,
header_json :random_value :: STRING AS random_value,
header_json :rent_paid :: FLOAT AS rent_paid,
header_json :signature :: STRING AS signature,
header_json :total_supply :: FLOAT AS total_supply,
header_json :validator_proposals :: ARRAY AS validator_proposals,
header_json :validator_reward :: FLOAT AS validator_reward,
blocks_final_id AS fact_blocks_id,
inserted_timestamp,
modified_timestamp
FROM
blocks

View File

@ -33,7 +33,7 @@ models:
tests:
- not_null
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
where: inserted_timestamp >= SYSDATE() - INTERVAL '7 days'
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
@ -154,7 +154,8 @@ models:
- name: GAS_PRICE
description: "{{ doc('gas_price')}}"
tests:
- not_null
- not_null:
where: inserted_timestamp >= SYSDATE() - INTERVAL '7 days'
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
@ -181,11 +182,12 @@ models:
- name: LATEST_PROTOCOL_VERSION
description: "{{ doc('latest_protocol_version')}}"
tests:
- not_null
- not_null:
where: inserted_timestamp >= SYSDATE() - INTERVAL '7 days'
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- INT
- name: NEXT_BP_HASH
description: "{{ doc('next_bp_hash')}}"
@ -297,9 +299,9 @@ models:
description: "{{doc('id')}}"
tests:
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
where: inserted_timestamp >= SYSDATE() - INTERVAL '7 days'
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
where: inserted_timestamp >= SYSDATE() - INTERVAL '7 days'
- name: INSERTED_TIMESTAMP
description: "{{doc('inserted_timestamp')}}"

View File

@ -15,7 +15,7 @@ SELECT
block_id,
block_timestamp,
tx_hash,
receipt_object_id AS receipt_id,
COALESCE(receipt_id, receipt_object_id) AS receipt_id,
predecessor_id,
receiver_id,
signer_id,
@ -30,7 +30,6 @@ SELECT
['log_id']
) }}
) AS fact_logs_id,
receipt_object_id, -- will drop col eventually
COALESCE(inserted_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
FROM

View File

@ -4,37 +4,24 @@
tags = ['core']
) }}
WITH receipts AS (
SELECT
*
FROM
{{ ref('silver__streamline_receipts_final') }}
)
SELECT
block_timestamp,
block_id,
tx_hash,
receipt_object_id AS receipt_id,
receipt_outcome_id,
receipt_id,
outcome_json :outcome :receipt_ids :: ARRAY AS receipt_outcome_id, -- TODO DEPRECATE THIS, it's in outcome_json
receiver_id,
receipt_actions :predecessor_id :: STRING AS predecessor_id,
receipt_actions AS actions,
execution_outcome AS outcome,
gas_burnt,
status_value,
logs,
proof,
metadata,
predecessor_id,
receipt_json AS actions, -- TODO this should be renamed. It's not just actions, it's the full receipt input
outcome_json AS outcome,
outcome_json :outcome :gas_burnt :: NUMBER AS gas_burnt,
outcome_json :outcome :status :: VARIANT AS status_value,
outcome_json :outcome :logs :: ARRAY AS logs,
outcome_json :proof :: ARRAY AS proof, -- TODO DEPRECATE THIS, it's in outcome_json
outcome_json :outcome :metadata :: VARIANT AS metadata, -- TODO DEPRECATE THIS, it's in outcome_json
receipt_succeeded,
COALESCE(
streamline_receipts_final_id,
{{ dbt_utils.generate_surrogate_key(
['receipt_object_id']
) }}
) AS fact_receipts_id,
receipt_object_id, -- to be deprecated
COALESCE(inserted_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
receipts_final_id AS fact_receipts_id,
inserted_timestamp,
modified_timestamp
FROM
receipts
{{ ref('silver__receipts_final') }}

View File

@ -4,34 +4,21 @@
tags = ['core']
) }}
WITH transactions AS (
SELECT
*
FROM
{{ ref('silver__streamline_transactions_final') }}
)
SELECT
tx_hash,
block_id,
block_hash,
block_timestamp,
nonce,
signature,
transaction_json :nonce :: INT AS nonce,
transaction_json :signature :: STRING AS signature,
tx_receiver,
tx_signer,
tx,
transaction_json AS tx,
gas_used,
transaction_fee,
attached_gas,
tx_succeeded,
COALESCE(
streamline_transactions_final_id,
{{ dbt_utils.generate_surrogate_key(
['tx_hash']
) }}
) AS fact_transactions_id,
COALESCE(inserted_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
transactions_final_id AS fact_transactions_id,
inserted_timestamp,
modified_timestamp
FROM
transactions
{{ ref('silver__transactions_final') }}

View File

@ -65,7 +65,8 @@ models:
- name: DIP4_VERSION
description: "{{ doc('dip4_version') }}"
tests:
- not_null
- not_null:
where: memo != 'refund'
- name: GAS_BURNT
description: "{{ doc('gas_burnt') }}"

View File

@ -1,37 +0,0 @@
{{ config(
materialized = 'view',
tags = ['core', 'horizon']
) }}
WITH horizon AS (
SELECT
action_id_horizon,
receipt_object_id,
tx_hash,
block_id,
block_timestamp,
method_name,
args,
deposit,
attached_gas,
receiver_id,
signer_id,
receipt_succeeded,
COALESCE(
horizon_decoded_actions_id,
{{ dbt_utils.generate_surrogate_key(
['action_id_horizon']
) }}
) AS fact_decoded_actions_id,
COALESCE(inserted_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,
COALESCE(modified_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS modified_timestamp
FROM
{{ ref('silver_horizon__decoded_actions') }}
WHERE
method_name != 'set'
)
SELECT
*
FROM
horizon

View File

@ -1,64 +0,0 @@
version: 2
models:
- name: horizon__fact_decoded_actions
description: |-
Deprecating Soon - please cease using this table by February 1, 2025.
columns:
- name: ACTION_ID_HORIZON
description: "{{ doc('action_id')}}"
- name: RECEIPT_OBJECT_ID
description: "{{ doc('receipt_object_id')}}"
- name: TX_HASH
description: "{{ doc('tx_hash')}}"
tests:
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: BLOCK_ID
description: "{{ doc('block_id')}}"
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp')}}"
tests:
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: METHOD_NAME
description: "{{ doc('method_name')}}"
- name: ARGS
description: "{{ doc('args')}}"
- name: DEPOSIT
description: "{{ doc('deposit')}}"
- name: ATTACHED_GAS
description: "{{ doc('attached_gas')}}"
- name: RECEIVER_ID
description: "{{ doc('receiver_id')}}"
- name: SIGNER_ID
description: "{{ doc('signer_id')}}"
- name: RECEIPT_SUCCEEDED
description: "{{ doc('receipt_succeeded')}}"
- name: FACT_DECODED_ACTIONS_ID
description: "{{doc('id')}}"
tests:
- unique:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- not_null:
where: inserted_timestamp BETWEEN SYSDATE() - INTERVAL '7 days' AND SYSDATE() - INTERVAL '2 hours'
- name: INSERTED_TIMESTAMP
description: "{{doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{doc('modified_timestamp')}}"

View File

@ -1,4 +1,4 @@
-- depends_on: {{ ref('silver__streamline_blocks') }}
-- depends_on: {{ ref('silver__blocks_final') }}
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
@ -16,8 +16,8 @@ WITH blocks_joined AS (
b.block_timestamp AS next_block_timestamp,
b.prev_hash AS next_prev_hash
FROM
{{ ref('silver__streamline_blocks') }} A -- Streamline Migration TODO - change this to fact blocks once table
LEFT JOIN {{ ref('silver__streamline_blocks') }}
{{ ref('silver__blocks_final') }} A
LEFT JOIN {{ ref('silver__blocks_final') }}
b
ON A.block_hash = b.prev_hash
WHERE
@ -34,7 +34,7 @@ AND (
SELECT
MIN(block_id) AS block_id
FROM
{{ ref('silver__streamline_blocks') }} -- Streamline Migration TODO - change this to fact blocks once table
{{ ref('silver__blocks_final') }}
WHERE
block_timestamp BETWEEN DATEADD('hour', -96, SYSDATE())
AND DATEADD('hour', -95, SYSDATE())

View File

@ -1,4 +1,4 @@
-- depends_on: {{ ref('silver__streamline_blocks') }}
-- depends_on: {{ ref('silver__blocks_final') }}
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
@ -12,11 +12,11 @@ WITH block_chunks_included AS (
SELECT
block_id,
block_timestamp,
header :chunks_included :: INT AS chunks_included,
header_json :chunks_included :: INT AS chunks_included,
_partition_by_block_number,
_inserted_timestamp
inserted_timestamp AS _inserted_timestamp
FROM
{{ ref('silver__streamline_blocks') }} -- Streamline Migration TODO - change this to fact blocks once table
{{ ref('silver__blocks_final') }}
WHERE
block_timestamp <= DATEADD('hour', -12, SYSDATE())
{% if is_incremental() %}
@ -29,7 +29,7 @@ AND (
SELECT
MIN(block_id) AS block_id
FROM
{{ ref('silver__streamline_blocks') }} -- Streamline Migration TODO - change this to fact blocks once table
{{ ref('silver__blocks_final') }}
WHERE
block_timestamp BETWEEN DATEADD('hour', -96, SYSDATE())
AND DATEADD('hour', -95, SYSDATE())
@ -70,13 +70,13 @@ summary_stats AS (
),
chunks_per_block AS (
SELECT
block_id,
origin_block_id AS block_id,
MAX(_inserted_timestamp) AS _inserted_timestamp,
COUNT(
DISTINCT chunk :header :chunk_hash :: STRING
chunk_hash
) AS chunk_ct
FROM
{{ ref('silver__streamline_shards') }} -- Streamline Migration TODO - change this to fact shards once table
{{ ref('silver__transactions_v2') }}
WHERE
block_id >= (SELECT min_block FROM summary_stats)
AND

View File

@ -4,7 +4,7 @@
full_refresh = False,
tags = ['observability']
) }}
-- TODO this can be deprecated. Not a good metric of completeness.
WITH summary_stats AS (
SELECT
@ -14,7 +14,7 @@ WITH summary_stats AS (
MAX(block_timestamp) AS max_block_timestamp,
COUNT(1) AS blocks_tested
FROM
{{ ref('silver__streamline_blocks') }} -- Streamline Migration TODO - change this to fact blocks once table
{{ ref('silver__blocks_final') }}
WHERE
block_timestamp <= DATEADD('hour', -12, SYSDATE())
@ -28,7 +28,7 @@ AND (
SELECT
MIN(block_id) AS block_id
FROM
{{ ref('silver__streamline_blocks') }} -- Streamline Migration TODO - change this to fact blocks once table
{{ ref('silver__blocks_final') }}
WHERE
block_timestamp BETWEEN DATEADD('hour', -96, SYSDATE())
AND DATEADD('hour', -95, SYSDATE())
@ -82,7 +82,7 @@ broken_blocks AS (
SELECT
DISTINCT block_id as block_id
FROM
{{ ref('silver__streamline_receipts_final') }} -- Streamline Migration TODO - change this to fact receipts once table
{{ ref('silver__receipts_final') }}
r
LEFT JOIN {{ ref('silver__logs_s3') }}
l USING (
@ -93,7 +93,7 @@ broken_blocks AS (
WHERE
l.tx_hash IS NULL
AND ARRAY_SIZE(
r.logs
r.outcome_json :outcome :logs :: ARRAY
) > 0
),
impacted_blocks AS (

View File

@ -0,0 +1,4 @@
# Deprecating Actions Models
All three of these models will be deprecated and dropped.
A number of silver models still need to be migrated to core.ez_actions; migrate those by EOM (March 2025).

View File

@ -1,55 +0,0 @@
version: 2
models:
- name: silver__actions_events_addkey_s3
description: |-
Deprecating soon - no longer updating.
columns:
- name: ACTION_ID
description: "{{ doc('action_id')}}"
- name: TX_HASH
description: "{{ doc('tx_hash')}}"
- name: BLOCK_ID
description: "{{ doc('block_id')}}"
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp')}}"
- name: NONCE
description: "{{ doc('nonce')}}"
- name: PUBLIC_KEY
description: "{{ doc('public_key')}}"
- name: PERMISSION
description: "{{ doc('permission')}}"
- name: ALLOWANCE
description: "{{ doc('allowance')}}"
- name: METHOD_NAME
description: "{{ doc('method_name')}}"
- name: RECEIVER_ID
description: "{{ doc('receiver_id')}}"
- name: _PARTITION_BY_BLOCK_NUMBER
description: "{{ doc('_partition_by_block_number')}}"
- name: _INSERTED_TIMESTAMP
description: "{{ doc('_inserted_timestamp')}}"
- name: actions_events_addkey_id
description: "{{doc('id')}}"
- name: inserted_timestamp
description: "{{doc('inserted_timestamp')}}"
- name: modified_timestamp
description: "{{doc('modified_timestamp')}}"
- name: _invocation_id
description: "{{doc('invocation_id')}}"

View File

@ -1,73 +0,0 @@
version: 2
models:
- name: silver__actions_events_function_call_s3
description: |-
This table extracts all FunctionCall events from actions and decodes the arguments for easy use.
columns:
- name: ACTION_ID
description: "{{ doc('action_id')}}"
- name: TX_HASH
description: "{{ doc('tx_hash')}}"
tests:
- not_null:
where: _inserted_timestamp <= current_timestamp - interval '1 hour'
- name: RECEIVER_ID
description: "{{ doc('receiver_id')}}"
- name: PREDECESSOR_ID
description: "{{ doc('predecessor_id')}}"
- name: SIGNER_ID
description: "{{ doc('signer_id')}}"
- name: BLOCK_ID
description: "{{ doc('block_id')}}"
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp')}}"
tests:
- not_null:
where: _inserted_timestamp <= current_timestamp - interval '1 hour'
- name: ACTION_NAME
description: "{{ doc('action_name')}}"
- name: METHOD_NAME
description: "{{ doc('method_name')}}"
- name: ARGS
description: "{{ doc('args')}}"
- name: DEPOSIT
description: "{{ doc('deposit')}}"
- name: ATTACHED_GAS
description: "{{ doc('attached_gas')}}"
- name: LOGS
description: "{{ doc('logs')}}"
- name: RECEIPT_SUCCEEDED
description: "{{ doc('receipt_succeeded')}}"
- name: _PARTITION_BY_BLOCK_NUMBER
description: "{{ doc('_partition_by_block_number')}}"
- name: _INSERTED_TIMESTAMP
description: "{{ doc('_inserted_timestamp')}}"
- name: ACTIONS_EVENTS_FUNCTION_CALL_ID
description: "{{doc('id')}}"
- name: INSERTED_TIMESTAMP
description: "{{doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{doc('modified_timestamp')}}"
- name: _INVOCATION_ID
description: "{{doc('invocation_id')}}"

View File

@ -15,21 +15,20 @@ WITH receipts AS (
SELECT
tx_hash,
receipt_object_id,
receipt_id AS receipt_object_id,
receiver_id,
signer_id,
receipt_json :receipt :Action :signer_id :: STRING AS signer_id,
block_id,
block_timestamp,
chunk_hash,
logs,
receipt_actions,
execution_outcome,
outcome_json :outcome :logs :: ARRAY AS logs,
receipt_json AS receipt_actions,
outcome_json AS execution_outcome,
receipt_succeeded,
gas_burnt,
_partition_by_block_number,
_inserted_timestamp
outcome_json :outcome :gas_burnt :: NUMBER AS gas_burnt,
_partition_by_block_number
FROM
{{ ref('silver__streamline_receipts_final') }}
{{ ref('silver__receipts_final') }}
{% if var("MANUAL_FIX") %}
WHERE
@ -64,12 +63,11 @@ flatten_actions AS (
VALUE AS action_object,
INDEX AS action_index,
receipt_succeeded,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
receipts,
LATERAL FLATTEN(
input => receipt_actions :receipt :Action :actions
input => receipt_actions :receipt :Action :actions :: ARRAY
)
),
FINAL AS (
@ -95,8 +93,7 @@ FINAL AS (
gas_price,
gas_burnt,
tokens_burnt,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
flatten_actions,
LATERAL FLATTEN(

View File

@ -1,82 +0,0 @@
version: 2
models:
- name: silver__actions_events_s3
description: |-
This table extracts all action events from a receipt and stores the argument data under action_data.
columns:
- name: ACTION_ID
description: "{{ doc('action_id')}}"
tests:
- unique:
where: tx_hash != 'J4CZZQrZK6kYPVLkrdbTEpcqhUNZiRxktbMzHviqeGgf'
- not_null
- name: BLOCK_ID
description: "{{ doc('block_id')}}"
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp')}}"
tests:
- not_null:
where: _inserted_timestamp <= current_timestamp - interval '1 hour'
- name: TX_HASH
description: "{{ doc('tx_hash')}}"
tests:
- not_null:
where: _inserted_timestamp <= current_timestamp - interval '1 hour'
- name: RECEIPT_OBJECT_ID
description: "{{ doc('receipt_object_id')}}"
- name: CHUNK_HASH
description: "{{ doc('chunk_hash')}}"
tests:
- not_null:
where: "block_id not in (34691244, 34691277)"
- name: RECEIVER_ID
description: "{{ doc('receiver_id')}}"
- name: PREDECESSOR_ID
description: "{{ doc('predecessor_id')}}"
- name: SIGNER_ID
description: "{{ doc('signer_id')}}"
- name: ACTION_INDEX
description: "{{ doc('action_index')}}"
- name: ACTION_NAME
description: "{{ doc('action_name')}}"
- name: ACTION_DATA
description: "{{ doc('action_data')}}"
- name: LOGS
description: "{{ doc('logs')}}"
- name: RECEIPT_SUCCEEDED
description: "{{ doc('receipt_succeeded')}}"
- name: _PARTITION_BY_BLOCK_NUMBER
description: "{{ doc('_partition_by_block_number')}}"
- name: _INSERTED_TIMESTAMP
description: "{{ doc('_inserted_timestamp')}}"
- name: ACTIONS_EVENTS_ID
description: "{{doc('id')}}"
- name: INSERTED_TIMESTAMP
description: "{{doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{doc('modified_timestamp')}}"
- name: _INVOCATION_ID
description: "{{doc('invocation_id')}}"

View File

@ -1,63 +0,0 @@
-- TODO slated for deprecation and drop
{{ config(
materialized = 'incremental',
unique_key = 'atlas_account_created_id',
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
tags = ['atlas']
) }}
WITH accts AS (
SELECT
receiver_id,
block_timestamp,
modified_timestamp AS _modified_timestamp
FROM
{{ ref('silver__streamline_receipts_final') }}
WHERE
receipt_succeeded
{% if var("MANUAL_FIX") %}
AND {{ partition_load_manual('no_buffer') }}
{% else %}
{% if is_incremental() %}
AND _modified_timestamp >= (
SELECT
MAX(_modified_timestamp) - INTERVAL '2 days'
FROM
{{ this }}
)
{% endif %}
{% endif %}
qualify ROW_NUMBER() over (
PARTITION BY receiver_id
ORDER BY
block_timestamp
) = 1
),
FINAL AS (
SELECT
block_timestamp :: DATE AS "DAY",
COUNT(*) AS wallets_created,
MAX(_modified_timestamp) AS _modified_timestamp
FROM
accts
GROUP BY
1
)
SELECT
day,
wallets_created,
_modified_timestamp,
{{ dbt_utils.generate_surrogate_key(
['DAY']
) }} AS atlas_account_created_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL
WHERE
DAY IS NOT NULL

View File

@ -1,35 +0,0 @@
version: 2
models:
- name: silver__atlas_accounts_created
description: |-
Wallet creation on NEAR
columns:
- name: atlas_account_created_id
description: "{{ doc('id')}}"
tests:
- not_null
- unique
- name: day
description: "{{ doc('date')}}"
tests:
- not_null
- name: wallets_created
description: "{{ doc('wallets_created')}}"
tests:
- not_null
- name: inserted_timestamp
description: "{{doc('inserted_timestamp')}}"
tests:
- not_null
- name: modified_timestamp
description: "{{doc('modified_timestamp')}}"
tests:
- not_null
- name: _invocation_id
description: "{{doc('invocation_id')}}"

View File

@ -1,66 +0,0 @@
-- TODO slated for deprecation and drop
{{ config(
materialized = 'incremental',
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
unique_key = 'address',
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
tags = ['atlas']
) }}
WITH txs AS (
SELECT
tx_signer AS address,
block_id,
block_timestamp,
tx_hash,
_partition_by_block_number,
_inserted_timestamp,
modified_timestamp AS _modified_timestamp
FROM
{{ ref('silver__streamline_transactions_final') }}
WHERE
{% if var("MANUAL_FIX") %}
{{ partition_load_manual('no_buffer') }}
{% else %}
{% if var('IS_MIGRATION') %}
{{ incremental_load_filter('_inserted_timestamp') }}
{% else %}
{{ incremental_load_filter('_modified_timestamp') }}
{% endif %}
{% endif %}
),
FINAL AS (
SELECT
address,
MIN(block_timestamp) AS first_tx_timestamp,
MIN(block_id) AS first_tx_block_id,
MIN(_partition_by_block_number) AS _partition_by_block_number,
MIN(_inserted_timestamp) AS _inserted_timestamp,
MIN(_modified_timestamp) AS _modified_timestamp
FROM
txs
GROUP BY
1
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['address']
) }} AS atlas_address_first_action_id,
address,
first_tx_timestamp,
first_tx_block_id,
_partition_by_block_number,
_inserted_timestamp,
_modified_timestamp,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL qualify ROW_NUMBER() over (
PARTITION BY address
ORDER BY
first_tx_timestamp
) = 1

View File

@ -1,54 +0,0 @@
version: 2
models:
- name: silver__atlas_address_first_action
description: |-
Parses transactions table for the block number and timestamp of a wallets first signed transaction.
columns:
- name: atlas_address_first_action_id
description: "{{ doc('id') }}"
tests:
- not_null
- unique
- name: address
description: "{{ doc('address') }}"
tests:
- not_null
- unique
- name: first_tx_timestamp
description: "{{ doc('block_timestamp')}}"
tests:
- not_null
- name: first_tx_block_id
description: "{{ doc('block_id') }}"
tests:
- not_null
- name: _partition_by_block_number
description: "{{ doc('_partition_by_block_number') }}"
- name: _inserted_timestamp
description: "{{ doc('_inserted_timestamp') }}"
tests:
- name: not_null_silver__atlas_address_first_action_INSERTED_TIMESTAMP_
test_name: not_null
- name: _modified_timestamp
description: "{{ doc('_modified_timestamp') }}"
- name: inserted_timestamp
description: "{{ doc('inserted_timestamp') }}"
tests:
- not_null
- name: modified_timestamp
description: "{{ doc('modified_timestamp') }}"
tests:
- not_null
- name: _invocation_id
description: "{{ doc('invocation_id') }}"

View File

@ -1,109 +0,0 @@
-- TODO slated for deprecation and drop
{{ config(
materialized = 'incremental',
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
unique_key = 'day',
tags = ['atlas']
) }}
WITH dates AS (
SELECT
date_day AS day
FROM
{{ source(
'crosschain',
'dim_dates'
) }}
{% if is_incremental() %}
WHERE
date_day > (
SELECT
MAX(day)
FROM
{{ this }}
)
AND date_day < SYSDATE() :: DATE
{% else %}
WHERE
date_day BETWEEN '2020-07-22'
AND SYSDATE() :: DATE
{% endif %}
),
signer_first_date AS (
SELECT
address,
first_tx_timestamp
FROM
{{ ref('silver__atlas_address_first_action') }}
),
txns AS (
SELECT
block_timestamp :: DATE AS day,
tx_signer,
first_tx_timestamp,
_inserted_timestamp
FROM
{{ ref('silver__streamline_transactions_final') }}
t
LEFT JOIN signer_first_date s
ON t.tx_signer = s.address
{% if var("MANUAL_FIX") %}
WHERE
{{ partition_load_manual('no_buffer') }}
{% else %}
{% if is_incremental() %}
WHERE
block_timestamp :: DATE >= (
SELECT
MAX(day)
FROM
{{ this }}
) - INTERVAL '30 days'
{% endif %}
{% endif %}
),
FINAL AS (
SELECT
d.day,
COUNT(
DISTINCT tx_signer
) AS maa,
COUNT(
DISTINCT IFF(
first_tx_timestamp >= d.day - INTERVAL '30 Days'
AND first_tx_timestamp < d.day,
tx_signer,
NULL
)
) AS new_maas,
MAX(_inserted_timestamp) AS _inserted_timestamp
FROM
dates d
LEFT JOIN txns t
ON t.day < d.day
AND t.day >= d.day - INTERVAL '30 days'
WHERE
d.day != SYSDATE() :: DATE
GROUP BY
1
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['day']
) }} AS atlas_maa_id,
day,
maa,
new_maas,
maa - new_maas AS returning_maas,
_inserted_timestamp,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL

View File

@ -1,53 +0,0 @@
version: 2
models:
- name: silver__atlas_maa
description: |-
Monthly Active Accounts (wallets) on NEAR, including new and returning wallets, calculated over a rolling 30 day window. An active account, here, is defined as the signing of at least one transaction.
columns:
- name: atlas_maa_id
description: "{{ doc('id') }}"
tests:
- not_null
- unique
- name: day
description: "{{ doc('active_day') }}"
tests:
- not_null
- unique
- name: maa
description: "{{ doc('maa')}}"
tests:
- not_null
- name: new_maas
description: "{{ doc('new_maas') }}"
tests:
- not_null
- name: returning_maas
description: "{{ doc('returning_maas') }}"
tests:
- not_null
- name: _inserted_timestamp
description: "{{ doc('_inserted_timestamp') }}"
tests:
- name: not_null_silver__atlas_near_maa_INSERTED_TIMESTAMP_
test_name: not_null
- name: inserted_timestamp
description: "{{ doc('inserted_timestamp') }}"
tests:
- not_null
- name: modified_timestamp
description: "{{ doc('modified_timestamp') }}"
tests:
- not_null
- name: _invocation_id
description: "{{ doc('invocation_id') }}"

View File

@ -1,51 +0,0 @@
-- TODO slated for deprecation and drop
{{ config(
materialized = 'incremental',
unique_key = 'atlas_nft_30_trailing_id',
incremental_strategy = "merge",
merge_exclude_columns = ["inserted_timestamp"],
tags = ['atlas']
) }}
WITH date_range AS (
SELECT
date_day AS DAY
FROM
{{ ref('silver__dates') }}
WHERE
{% if is_incremental() %}
date_day >= SYSDATE() - INTERVAL '3 DAY'
{% else %}
date_day >= '2021-01-01' -- first day of data
{% endif %}
AND date_day <= SYSDATE() :: DATE
),
FINAL AS (
SELECT
d.day AS DAY,
COUNT(
t.tx_hash
) AS txns
FROM
date_range d
LEFT JOIN {{ ref('silver__atlas_nft_transactions') }}
t
ON t.day BETWEEN d.day - INTERVAL '29 day'
AND d.day
GROUP BY
d.day
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['day']
) }} AS atlas_nft_30_trailing_id,
DAY,
txns,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL

View File

@ -1,36 +0,0 @@
version: 2
models:
- name: silver__atlas_nft_30_trailing
description: |-
This incremental dbt model generates a summary of NFT transactions from the 'silver__atlas_nft_transactions' table. It provides a daily count of transactions, accounting for a 30-day lookback period for each day within the specified date range.
columns:
- name: atlas_nft_30_trailing_id
description: "{{ doc('id')}}"
tests:
- not_null
- unique
- name: day
description: "{{ doc('date')}}"
tests:
- not_null
- unique
- name: txns
description: "{{ doc('tx_count')}}"
tests:
- not_null
- name: inserted_timestamp
description: "{{doc('inserted_timestamp')}}"
tests:
- not_null
- name: modified_timestamp
description: "{{doc('modified_timestamp')}}"
tests:
- not_null
- name: _invocation_id
description: "{{doc('invocation_id')}}"

View File

@ -1,50 +0,0 @@
-- TODO slated for deprecation and drop
{{ config(
materialized = 'table',
unique_key = 'atlas_nft_detailed_id',
tags = ['atlas']
) }}
WITH nft_data AS (
SELECT
*
FROM
{{ ref('silver__atlas_nft_transactions') }}
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['DAY', 'receiver_id']
) }} AS atlas_nft_detailed_id,
DAY,
receiver_id,
COUNT(
DISTINCT token_id
) AS tokens,
COUNT(
CASE
WHEN method_name = 'nft_transfer' THEN tx_hash
END
) AS all_transfers,
COUNT(
DISTINCT owner
) AS owners,
COUNT(*) AS transactions,
COUNT(
CASE
WHEN method_name != 'nft_transfer' THEN tx_hash
END
) AS mints,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id,
MAX(_inserted_timestamp) AS _inserted_timestamp
FROM
nft_data
GROUP BY
1,
2,
3
ORDER BY
4 DESC

View File

@ -1,61 +0,0 @@
version: 2
models:
- name: silver__atlas_nft_detailed
description: |-
This dbt table model provides an overview of NFT transactions on NEAR, aggregated by day and receiver.
columns:
- name: atlas_nft_detailed_id
description: "{{ doc('id')}}"
tests:
- not_null
- unique
- name: day
description: "{{ doc('date')}}"
tests:
- not_null
- name: receiver_id
description: "The identifier of the receiver in the NFT transaction."
tests:
- not_null
- name: tokens
description: "The count of unique tokens transferred to the receiver on the given day."
tests:
- not_null
- name: all_transfers
description: "The total number of 'nft_transfer' method transactions that occurred."
tests:
- not_null
- name: owners
description: "The count of distinct owners who have interacted with the NFT."
tests:
- not_null
- name: transactions
description: "{{ doc('tx_count')}}"
tests:
- not_null
- name: mints
description: "The count of transactions where the 'method_name' is not 'nft_transfer', indicating minting actions."
tests:
- not_null
- name: inserted_timestamp
description: "{{doc('inserted_timestamp')}}"
tests:
- not_null
- name: modified_timestamp
description: "{{doc('modified_timestamp')}}"
tests:
- not_null
- name: _invocation_id
description: "{{doc('invocation_id')}}"

View File

@ -1,57 +0,0 @@
-- TODO slated for deprecation and drop
{{ config(
materialized = 'table',
unique_key = 'atlas_nft_table_id',
tags = ['atlas']
) }}
WITH nft_data AS (
SELECT
*
FROM
{{ ref('silver__atlas_nft_transactions') }}
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['receiver_id']
) }} AS atlas_nft_table_id,
receiver_id,
COUNT(
DISTINCT token_id
) AS tokens,
COUNT(
CASE
WHEN method_name = 'nft_transfer'
AND DAY >= (SYSDATE() :: DATE - INTERVAL '1 day') THEN tx_hash END
) AS transfers_24h,
COUNT(
CASE
WHEN method_name = 'nft_transfer'
AND DAY >= (SYSDATE() :: DATE - INTERVAL '3 day') THEN tx_hash END
) AS transfers_3d,
COUNT(
CASE
WHEN method_name = 'nft_transfer' THEN tx_hash
END
) AS all_transfers,
COUNT(
DISTINCT owner
) AS owners,
COUNT(*) AS transactions,
COUNT(
CASE
WHEN method_name != 'nft_transfer' THEN tx_hash
END
) AS mints,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
nft_data
GROUP BY
1,
2
ORDER BY
3 DESC

View File

@ -1,66 +0,0 @@
version: 2
models:
- name: silver__atlas_nft_table
description: |-
This table model provides a breakdown of NFT transaction activity by receiver_id. It includes counts of unique tokens, transfers within the last 24 hours and 3 days, all transfers, unique owners, total transactions, and minting events.
columns:
- name: atlas_nft_table_id
description: "{ { doc('id')}}"
tests:
- unique
- not_null
- name: receiver_id
description: "{ { doc('receiver_id')}}"
tests:
- not_null
- name: tokens
description: "The count of unique tokens that have been received by the receiver_id."
tests:
- not_null
- name: transfers_24h
description: "The count of 'nft_transfer' transactions that occurred in the last 24 hours."
tests:
- not_null
- name: transfers_3d
description: "The count of 'nft_transfer' transactions that occurred in the last 3 days."
tests:
- not_null
- name: all_transfers
description: "The total count of 'nft_transfer' transactions."
tests:
- not_null
- name: owners
description: "The count of distinct owners that have interacted with the receiver's tokens."
tests:
- not_null
- name: transactions
description: "{{ doc('tx_count')}}"
tests:
- not_null
- name: mints
description: "The count of transactions where the method_name indicates a minting event rather than a transfer."
tests:
- not_null
- name: inserted_timestamp
description: "{{doc('inserted_timestamp')}}"
tests:
- not_null
- name: modified_timestamp
description: "{{doc('modified_timestamp')}}"
tests:
- not_null
- name: _invocation_id
description: "{{doc('invocation_id')}}"

View File

@ -1,107 +0,0 @@
-- TODO slated for deprecation and drop
{{ config(
materialized = "incremental",
cluster_by = ["day"],
unique_key = "atlas_nft_transactions_id",
merge_exclude_columns = ["inserted_timestamp"],
incremental_strategy = "merge",
tags = ['atlas']
) }}
WITH nft_mints AS (
SELECT
block_timestamp :: DATE AS DAY,
receipt_object_id,
tx_hash,
method_name,
receiver_id,
signer_id,
owner_id AS owner,
token_id,
_partition_by_block_number,
_inserted_timestamp,
modified_timestamp AS _modified_timestamp
FROM
{{ ref('silver__standard_nft_mint_s3') }}
WHERE
{% if var("MANUAL_FIX") %}
{{ partition_load_manual('no_buffer') }}
{% else %}
{% if var('IS_MIGRATION') %}
{{ incremental_load_filter('_inserted_timestamp') }}
{% else %}
{{ incremental_load_filter('_modified_timestamp') }}
{% endif %}
{% endif %}
),
nft_transfers AS (
SELECT
block_timestamp :: DATE AS DAY,
SPLIT(
action_id,
'-'
) [0] :: STRING AS receipt_object_id,
tx_hash,
method_name,
receiver_id,
signer_id,
args ['receiver_id'] AS owner,
args ['token_id'] AS token_id,
_partition_by_block_number,
_inserted_timestamp,
modified_timestamp AS _modified_timestamp
FROM
{{ ref('silver__actions_events_function_call_s3') }}
WHERE
method_name = 'nft_transfer'
AND {% if var("MANUAL_FIX") %}
{{ partition_load_manual('no_buffer') }}
{% else %}
{% if var('IS_MIGRATION') %}
{{ incremental_load_filter('_inserted_timestamp') }}
{% else %}
{{ incremental_load_filter('_modified_timestamp') }}
{% endif %}
{% endif %}
),
unioned_nft_data AS (
SELECT
*
FROM
nft_mints
UNION ALL
SELECT
*
FROM
nft_transfers
)
SELECT
{{ dbt_utils.generate_surrogate_key(
['receipt_object_id', 'method_name', 'token_id', 'owner']
) }} AS atlas_nft_transactions_id,
DAY,
tx_hash,
method_name,
receiver_id,
signer_id,
owner,
token_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id,
_partition_by_block_number,
_inserted_timestamp,
_modified_timestamp
FROM
unioned_nft_data
WHERE
-- failed receipts may have unparsable base64 FunctionCall args
token_id IS NOT NULL
AND owner IS NOT NULL
qualify ROW_NUMBER() over (
PARTITION BY atlas_nft_transactions_id
ORDER BY
_inserted_timestamp DESC
) = 1
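-- The QUALIFY above keeps only the most recently ingested row per surrogate key,
-- a standard dedupe when the same receipt can arrive in more than one batch.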

View File

@ -1,77 +0,0 @@
version: 2
models:
- name: silver__atlas_nft_transactions
description: |-
This incremental dbt model unifies NFT minting and transfer data into a single view, providing a comprehensive look at NFT activities. It captures daily activities by transaction hash, method name, receiver ID, signer ID, owner, and token ID.
tests:
- dbt_utils.recency:
datepart: day
field: _inserted_timestamp
interval: 1
columns:
- name: atlas_nft_transactions_id
description: "{{doc('id')}}"
tests:
- unique
- not_null
- name: day
description: "{{doc('date')}}"
tests:
- not_null
- name: tx_hash
description: "{{doc('tx_hash')}}"
tests:
- not_null
- name: method_name
description: "{{doc('method_name')}}"
tests:
- not_null
- name: receiver_id
description: "{{doc('receiver_id')}}"
tests:
- not_null
- name: signer_id
description: "{{doc('signer_id')}}"
tests:
- not_null
- name: owner
description: "{{doc('owner')}}"
tests:
- not_null
- name: token_id
description: "{{doc('token_id')}}"
tests:
- not_null
- name: inserted_timestamp
description: "{{doc('inserted_timestamp')}}"
tests:
- not_null
- name: modified_timestamp
description: "{{doc('modified_timestamp')}}"
tests:
- not_null
- name: _invocation_id
description: "{{doc('invocation_id')}}"
- name: _partition_by_block_number
description: "{{doc('_partition_by_block_number')}}"
- name: _inserted_timestamp
description: "{{doc('_inserted_timestamp')}}"
tests:
- not_null
- name: _modified_timestamp
description: "{{doc('_modified_timestamp')}}"

View File

@ -8,14 +8,23 @@
WITH blocks AS (
SELECT
*
block_id,
block_timestamp,
block_author,
header_json :total_supply :: NUMBER AS total_supply,
header_json :epoch_id :: STRING AS epoch_id,
_partition_by_block_number
FROM
{{ ref('silver__streamline_blocks') }}
WHERE
{{ ref('silver__blocks_final') }}
{% if var("MANUAL_FIX") %}
{{ partition_load_manual('no_buffer') }}
WHERE {{ partition_load_manual('no_buffer') }}
{% else %}
{{ incremental_load_filter('_inserted_timestamp') }}
WHERE modified_timestamp >= (
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
)
{% endif %}
),
epochs AS (

View File

@ -10,27 +10,27 @@
WITH receipts AS (
SELECT
receipt_object_id,
receipt_id AS receipt_object_id,
tx_hash,
block_timestamp,
receipt_actions,
receipt_json AS receipt_actions,
receiver_id,
status_value,
logs,
_partition_by_block_number,
_inserted_timestamp,
modified_timestamp AS _modified_timestamp
predecessor_id,
outcome_json :outcome :status :: VARIANT AS status_value,
outcome_json :outcome :logs :: ARRAY AS logs,
_partition_by_block_number
FROM
{{ ref('silver__streamline_receipts_final') }}
WHERE
{{ ref('silver__receipts_final') }}
{% if var("MANUAL_FIX") %}
WHERE
{{ partition_load_manual('no_buffer') }}
{% else %}
{% if var('IS_MIGRATION') %}
{{ incremental_load_filter('_inserted_timestamp') }}
{% else %}
{{ incremental_load_filter('_modified_timestamp') }}
{% endif %}
WHERE modified_timestamp >= (
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
)
{% endif %}
),
FINAL AS (
@ -38,16 +38,14 @@ FINAL AS (
receipt_object_id,
tx_hash,
block_timestamp,
receipt_actions :predecessor_id :: STRING AS predecessor_id,
predecessor_id,
receiver_id,
receipt_actions AS actions,
object_keys(
status_value
) [0] :: STRING AS status,
logs,
_partition_by_block_number,
_inserted_timestamp,
_modified_timestamp
_partition_by_block_number
FROM
receipts
WHERE

View File

@ -1,6 +1,6 @@
{{ config(
materialized = 'ephemeral',
tags = ['helper', 'receipt_map','scheduled_core']
tags = ['helper', 'receipt_map']
) }}
SELECT

View File

@ -1,6 +1,6 @@
{{ config(
materialized = 'view',
tags = ['helper', 'receipt_map','scheduled_core']
tags = ['helper', 'receipt_map']
) }}
WITH receipts AS (

View File

@ -1,7 +1,7 @@
{{ config(
materialized = 'view',
unique_key = 'receipt_id',
tags = ['helper', 'receipt_map','scheduled_core']
tags = ['helper', 'receipt_map']
) }}
WITH

View File

@ -6,7 +6,7 @@
merge_exclude_columns = ['inserted_timestamp'],
cluster_by = ['block_timestamp::DATE','_inserted_timestamp::DATE', '_partition_by_block_number'],
unique_key = 'block_id',
tags = ['load', 'load_blocks','scheduled_core'],
tags = ['load', 'load_blocks', 'deprecated_lake_archive'],
full_refresh = False
) }}

View File

@ -5,7 +5,7 @@
merge_exclude_columns = ['inserted_timestamp'],
unique_key = 'receipt_id',
cluster_by = ['modified_timestamp::date', '_partition_by_block_number'],
tags = ['load', 'load_shards','scheduled_core']
tags = ['load', 'load_shards', 'deprecated_lake_archive']
) }}
WITH shards AS (

View File

@ -6,7 +6,7 @@
unique_key = 'receipt_object_id',
cluster_by = ['block_timestamp::DATE','modified_timestamp::DATE', '_partition_by_block_number', ],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_hash,receipt_id,receiver_id,predecessor_id);",
tags = ['receipt_map','scheduled_core'],
tags = ['receipt_map', 'deprecated_lake_archive'],
full_refresh = False
) }}

View File

@ -5,7 +5,7 @@
merge_exclude_columns = ['inserted_timestamp'],
cluster_by = ['_inserted_timestamp::DATE', '_partition_by_block_number'],
unique_key = 'shard_id',
tags = ['load', 'load_shards','scheduled_core'],
tags = ['load', 'load_shards', 'deprecated_lake_archive'],
full_refresh = False
) }}

View File

@ -5,7 +5,7 @@
merge_exclude_columns = ['inserted_timestamp'],
unique_key = 'tx_hash',
cluster_by = ['modified_timestamp::date', '_partition_by_block_number'],
tags = ['load', 'load_shards','scheduled_core']
tags = ['load', 'load_shards', 'deprecated_lake_archive']
) }}
WITH chunks AS (

View File

@ -3,7 +3,7 @@
incremental_strategy = 'delete+insert',
unique_key = 'tx_hash',
cluster_by = ['block_timestamp::DATE','modified_timestamp::DATE', '_partition_by_block_number'],
tags = ['receipt_map','scheduled_core']
tags = ['receipt_map', 'deprecated_lake_archive']
) }}
WITH int_txs AS (

View File

@ -0,0 +1,34 @@
{{ config(
materialized = 'ephemeral'
) }}
SELECT
block_id,
block_timestamp,
block_hash,
prev_hash,
block_author,
chunks AS chunks_json,
header AS header_json,
_partition_by_block_number,
{{ dbt_utils.generate_surrogate_key(
['block_id']
) }} AS blocks_final_id,
COALESCE(
inserted_timestamp,
_inserted_timestamp,
_load_timestamp
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
_inserted_timestamp,
_load_timestamp
) AS modified_timestamp,
_invocation_id
FROM
{{ ref('silver__streamline_blocks') }}
{% if var("NEAR_MIGRATE_ARCHIVE") %}
WHERE
{{ partition_load_manual('no_buffer') }}
{% endif %}
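-- The COALESCEs above fall back from the new audit columns to the legacy
-- _inserted_timestamp / _load_timestamp columns, so rows migrated from the lake
-- tables keep a usable timestamp pair.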

View File

@ -0,0 +1,79 @@
{{ config(
materialized = 'ephemeral'
) }}
WITH lake_receipts_final AS (
SELECT
chunk_hash,
block_id,
block_timestamp,
tx_hash,
COALESCE(
receipt_id,
receipt_object_id
) AS receipt_id,
COALESCE(
predecessor_id,
receipt_actions :predecessor_id :: STRING
) AS predecessor_id,
receiver_id,
receipt_actions AS receipt_json,
execution_outcome AS outcome_json,
receipt_succeeded,
_partition_by_block_number,
{{ dbt_utils.generate_surrogate_key(
['COALESCE(receipt_id, receipt_object_id)']
) }} AS receipts_final_id,
COALESCE(
inserted_timestamp,
_inserted_timestamp,
_load_timestamp
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
_inserted_timestamp,
_load_timestamp
) AS modified_timestamp,
_invocation_id
FROM
{{ ref('silver__streamline_receipts_final') }}
{% if var("NEAR_MIGRATE_ARCHIVE") %}
WHERE
{{ partition_load_manual('no_buffer') }}
{% endif %}
),
lake_transactions_final AS (
SELECT
tx_hash,
tx_succeeded
FROM
{{ ref('silver__streamline_transactions_final') }}
{% if var("NEAR_MIGRATE_ARCHIVE") %}
WHERE
{{ partition_load_manual('front') }}
{% endif %}
)
SELECT
chunk_hash,
block_id,
block_timestamp,
r.tx_hash,
receipt_id,
predecessor_id,
receiver_id,
receipt_json,
outcome_json,
tx_succeeded,
receipt_succeeded,
_partition_by_block_number,
receipts_final_id,
inserted_timestamp,
modified_timestamp,
_invocation_id
FROM
lake_receipts_final r
LEFT JOIN lake_transactions_final tx
ON r.tx_hash = tx.tx_hash
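-- tx_succeeded is joined in from the legacy transactions table since the receipt
-- rows themselves only carry receipt-level status (receipt_succeeded).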

View File

@ -0,0 +1,86 @@
{{ config(
materialized = 'ephemeral'
) }}
WITH lake_transactions_final AS (
SELECT
block_id,
block_timestamp,
tx_hash,
tx_signer,
tx_receiver,
tx_succeeded,
gas_used,
transaction_fee,
attached_gas,
_partition_by_block_number,
{{ dbt_utils.generate_surrogate_key(
['tx_hash']
) }} AS transactions_final_id,
COALESCE(
inserted_timestamp,
_inserted_timestamp,
_load_timestamp
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
_inserted_timestamp,
_load_timestamp
) AS modified_timestamp,
_invocation_id
FROM
{{ ref('silver__streamline_transactions_final') }}
{% if var("NEAR_MIGRATE_ARCHIVE") %}
WHERE
{{ partition_load_manual('no_buffer') }}
{% endif %}
),
lake_transactions_int AS (
SELECT
tx_hash,
block_id,
shard_number,
chunk_hash,
tx :transaction :: variant AS transaction_json,
tx :outcome :execution_outcome :: variant AS outcome_json,
_partition_by_block_number
FROM
{{ ref('silver__streamline_transactions') }}
{% if var("NEAR_MIGRATE_ARCHIVE") %}
WHERE
{{ partition_load_manual('no_buffer') }}
{% endif %}
),
transaction_archive AS (
SELECT
i.chunk_hash,
i.shard_number AS shard_id,
f.block_id,
f.block_timestamp,
f.tx_hash,
f.tx_signer,
f.tx_receiver,
i.transaction_json,
i.outcome_json,
f.tx_succeeded,
f.gas_used,
f.transaction_fee,
f.attached_gas,
f._partition_by_block_number,
f.transactions_final_id,
f.inserted_timestamp,
f.modified_timestamp,
f._invocation_id
FROM
lake_transactions_final f
LEFT JOIN lake_transactions_int i
ON f.tx_hash = i.tx_hash
AND f._partition_by_block_number = i._partition_by_block_number
)
SELECT
*
FROM
transaction_archive
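-- The int table contributes the raw transaction/outcome JSON while the final
-- table contributes the derived fields; joining on both tx_hash and
-- _partition_by_block_number should keep the lookup partition-pruned.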

View File

@ -0,0 +1,70 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ["dynamic_range_predicate","block_timestamp::date"],
incremental_strategy = 'merge',
merge_exclude_columns = ['inserted_timestamp'],
unique_key = 'block_id',
cluster_by = ['block_timestamp::DATE','modified_timestamp::DATE'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(block_id, block_hash);",
tags = ['scheduled_core', 'core_v2'],
full_refresh = false
) }}
{% if var('NEAR_MIGRATE_ARCHIVE', False) %}
{% if execute %}
{% do log('Migrating blocks ' ~ var('RANGE_START') ~ ' to ' ~ var('RANGE_END'), info=True) %}
{% do log('Invocation ID: ' ~ invocation_id, info=True) %}
{% endif %}
SELECT
block_id,
block_timestamp,
block_hash,
prev_hash,
block_author,
chunks_json,
header_json,
_partition_by_block_number,
blocks_final_id,
inserted_timestamp,
modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{{ ref('_migrate_blocks') }}
{% else %}
WITH blocks AS (
SELECT
block_id,
block_timestamp,
block_hash,
block_json :header :prev_hash :: STRING AS prev_hash,
block_json :author :: STRING AS block_author,
block_json :chunks :: ARRAY AS chunks_json,
block_json :header :: OBJECT AS header_json,
partition_key AS _partition_by_block_number
FROM
{{ ref('silver__blocks_v2') }}
{% if is_incremental() %}
WHERE
modified_timestamp >= (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01')
FROM
{{ this }}
)
{% endif %}
)
SELECT
*,
{{ dbt_utils.generate_surrogate_key(
['block_id']
) }} AS blocks_final_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
blocks
{% endif %}
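-- Two modes: with NEAR_MIGRATE_ARCHIVE the model back-loads a fixed block range
-- from the ephemeral _migrate_blocks relation; otherwise incremental runs select
-- only source rows whose modified_timestamp exceeds the target's
-- MAX(modified_timestamp) watermark. full_refresh = false appears intended to
-- protect the back-loaded history from an accidental rebuild.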

View File

@ -0,0 +1,75 @@
version: 2
models:
- name: silver__blocks_final
description: |-
Table containing blocks for NEAR.
tests:
- dbt_utils.recency:
datepart: hour
field: inserted_timestamp
interval: 1
columns:
- name: block_id
description: "{{ doc('block_id') }}"
tests:
- not_null
- unique
- name: block_timestamp
description: "{{ doc('block_timestamp') }}"
tests:
- not_null
- name: block_hash
description: "{{ doc('block_hash') }}"
tests:
- not_null
- unique
- name: prev_hash
description: "{{ doc('prev_hash') }}"
tests:
- not_null
- unique
- name: block_author
description: "{{ doc('block_author') }}"
tests:
- not_null
- name: chunks_json
description: "{{ doc('chunks_json') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARIANT
- ARRAY
- OBJECT
- name: header_json
description: "{{ doc('header_json') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARIANT
- OBJECT
- name: _partition_by_block_number
description: "{{ doc('_partition_by_block_number') }}"
- name: blocks_final_id
description: "{{ doc('id') }}"
tests:
- not_null
- unique
- name: inserted_timestamp
description: "{{ doc('inserted_timestamp') }}"
- name: modified_timestamp
description: "{{ doc('modified_timestamp') }}"
- name: _invocation_id

View File

@ -5,8 +5,7 @@
incremental_strategy = 'merge',
incremental_predicates = ["dynamic_range_predicate","block_timestamp::date"],
unique_key = "block_hash",
cluster_by = ['modified_timestamp::DATE','partition_key'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_hash)",
cluster_by = ['modified_timestamp::DATE','block_timestamp::date'],
tags = ['scheduled_core', 'core_v2']
) }}

View File

@ -5,8 +5,7 @@
incremental_strategy = 'merge',
incremental_predicates = ["dynamic_range_predicate","block_timestamp::date"],
unique_key = "chunk_hash",
cluster_by = ['modified_timestamp::DATE','partition_key'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(chunk_hash)",
cluster_by = ['modified_timestamp::DATE','block_timestamp::date'],
tags = ['scheduled_core', 'core_v2']
) }}
@ -15,8 +14,7 @@ WITH bronze_chunks AS (
SELECT
VALUE :BLOCK_ID :: INT AS block_id,
VALUE :BLOCK_TIMESTAMP_EPOCH :: INT AS block_timestamp_epoch,
DATA :header :shard_id :: INT AS shard_id,
DATA :header :chunk_hash :: STRING AS chunk_hash,
VALUE :CHUNK_HASH :: STRING AS chunk_hash,
partition_key,
DATA :: variant AS chunk_json,
_inserted_timestamp
@ -38,11 +36,10 @@ WHERE
{% endif %}
)
SELECT
chunk_hash,
block_id,
block_timestamp_epoch,
TO_TIMESTAMP_NTZ(block_timestamp_epoch, 9) AS block_timestamp,
shard_id,
chunk_hash,
partition_key,
chunk_json,
_inserted_timestamp,

View File

@ -0,0 +1,248 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ["dynamic_range_predicate_custom","block_timestamp::date"],
incremental_strategy = 'merge',
merge_exclude_columns = ['inserted_timestamp'],
unique_key = 'receipt_id',
cluster_by = ['block_timestamp::DATE','modified_timestamp::DATE'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_hash,receipt_id,receiver_id,predecessor_id);",
tags = ['scheduled_core', 'core_v2'],
full_refresh = false
) }}
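-- Assumption: dynamic_range_predicate_custom is supplied via incremental_predicates
-- so the MERGE only scans target partitions whose block_timestamp::date falls in
-- the incoming batch's range, the usual dbt merge-pruning pattern.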
{% if var('NEAR_MIGRATE_ARCHIVE', False) %}
{% if execute %}
{% do log('Migrating receipts ' ~ var('RANGE_START') ~ ' to ' ~ var('RANGE_END'), info=True) %}
{% do log('Invocation ID: ' ~ invocation_id, info=True) %}
{% endif %}
SELECT
chunk_hash,
block_id,
block_timestamp,
tx_hash,
receipt_id,
predecessor_id,
receiver_id,
receipt_json,
outcome_json,
tx_succeeded,
receipt_succeeded,
_partition_by_block_number,
receipts_final_id,
inserted_timestamp,
modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{{ ref('_migrate_receipts') }}
{% else %}
{% if execute and not var("MANUAL_FIX") %}
{% if is_incremental() %}
{% set max_mod_query %}
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01') modified_timestamp
FROM
{{ this }}
{% endset %}
{% set max_mod = run_query(max_mod_query) [0] [0] %}
{% do log('max_mod: ' ~ max_mod, info=True) %}
{% set min_block_date_query %}
SELECT
MIN(origin_block_timestamp :: DATE) block_timestamp
FROM
{{ ref('silver__transactions_v2') }}
WHERE
modified_timestamp >= '{{max_mod}}'
{% endset %}
{% set min_bd = run_query(min_block_date_query) [0] [0] %}
{% do log('min_bd: ' ~ min_bd, info=True) %}
{% if not min_bd or min_bd == 'None' %}
{% set min_bd = '2099-01-01' %}
{% do log('min_bd: ' ~ min_bd, info=True) %}
{% endif %}
{% do log('min_block_date: ' ~ min_bd, info=True) %}
{% endif %}
{% endif %}
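-- max_mod is the target's high-water mark; min_bd is the earliest block date among
-- newly modified source transactions. Each CTE below filters on block_timestamp >=
-- min_bd to keep partition pruning effective, while the later modified_timestamp >=
-- max_mod filters restrict work to the new batch. The '2099-01-01' sentinel turns
-- the run into a no-op when no new rows exist.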
WITH txs_with_receipts AS (
SELECT
chunk_hash,
origin_block_id,
origin_block_timestamp,
tx_hash,
response_json,
response_json :transaction_outcome :outcome :receipt_ids [0] :: STRING AS initial_receipt_id,
response_json :status :Failure IS NULL AS tx_succeeded,
partition_key AS _partition_by_block_number,
modified_timestamp
FROM
{{ ref('silver__transactions_v2') }}
{% if var("MANUAL_FIX") %}
WHERE
{{ partition_load_manual('no_buffer', 'partition_key') }}
{% else %}
{% if is_incremental() %}
WHERE origin_block_timestamp :: DATE >= '{{min_bd}}'
{% endif %}
{% endif %}
),
blocks AS (
SELECT
block_id,
block_hash,
block_timestamp,
modified_timestamp
FROM
{{ ref('silver__blocks_v2') }}
{% if var("MANUAL_FIX") %}
WHERE
{{ partition_load_manual('no_buffer', 'partition_key') }}
{% else %}
{% if is_incremental() %}
WHERE block_timestamp :: DATE >= '{{min_bd}}' :: DATE
{% endif %}
{% endif %}
),
flatten_receipts AS (
SELECT
origin_block_timestamp,
chunk_hash,
tx_hash,
tx_succeeded,
VALUE :receipt_id :: STRING AS receipt_id,
VALUE :: variant AS receipt_json,
_partition_by_block_number,
modified_timestamp
FROM
txs_with_receipts,
LATERAL FLATTEN(
input => response_json :receipts :: ARRAY
)
),
flatten_receipt_outcomes AS (
SELECT
VALUE :block_hash :: STRING AS block_hash,
tx_hash,
VALUE :id :: STRING AS receipt_id,
VALUE :: variant AS outcome_json
FROM
txs_with_receipts,
LATERAL FLATTEN(
input => response_json :receipts_outcome :: ARRAY
)
),
receipts_full AS (
SELECT
chunk_hash,
ro.block_hash,
block_id,
block_timestamp,
r.tx_hash,
r.receipt_id,
receipt_json,
outcome_json,
tx_succeeded,
_partition_by_block_number
FROM
flatten_receipts r
LEFT JOIN flatten_receipt_outcomes ro
ON r.receipt_id = ro.receipt_id
LEFT JOIN blocks b
ON ro.block_hash = b.block_hash
{% if is_incremental() and not var("MANUAL_FIX") %}
WHERE
GREATEST(
COALESCE(r.modified_timestamp, '1970-01-01'),
COALESCE(b.modified_timestamp, '1970-01-01')
) >= '{{max_mod}}'
{% endif %}
),
initial_receipt_full AS (
SELECT
chunk_hash,
origin_block_id AS block_id,
origin_block_timestamp AS block_timestamp,
txr.tx_hash,
initial_receipt_id AS receipt_id,
OBJECT_CONSTRUCT(
'predecessor_id', response_json :transaction :signer_id :: STRING,
'priority', response_json :transaction :priority_fee :: INTEGER,
'receipt', OBJECT_CONSTRUCT(
'Action', OBJECT_CONSTRUCT(
'actions', response_json :transaction :actions :: ARRAY,
'gas_price', Null,
'input_data_ids', Null,
'is_promise_yield', Null,
'output_data_receivers', Null,
'signer_id', response_json :transaction :signer_id :: STRING,
'signer_public_key', response_json :transaction :public_key :: STRING
)
),
'receipt_id', initial_receipt_id :: STRING,
'receiver_id', response_json :transaction :receiver_id :: STRING
) AS receipt_json,
outcome_json,
tx_succeeded,
_partition_by_block_number
FROM
txs_with_receipts txr
LEFT JOIN flatten_receipt_outcomes ro
ON txr.initial_receipt_id = ro.receipt_id
AND txr.tx_hash = ro.tx_hash
{% if is_incremental() and not var("MANUAL_FIX") %}
WHERE
modified_timestamp >= '{{max_mod}}'
{% endif %}
),
FINAL AS (
SELECT
chunk_hash,
block_id,
block_timestamp,
tx_hash,
receipt_id,
receipt_json :predecessor_id :: STRING AS predecessor_id,
receipt_json :receiver_id :: STRING AS receiver_id,
receipt_json,
outcome_json,
tx_succeeded,
outcome_json :outcome :status :Failure IS NULL AS receipt_succeeded,
_partition_by_block_number
FROM
receipts_full
UNION ALL
SELECT
chunk_hash,
block_id,
block_timestamp,
tx_hash,
receipt_id,
receipt_json :predecessor_id :: STRING AS predecessor_id,
receipt_json :receiver_id :: STRING AS receiver_id,
receipt_json,
outcome_json,
tx_succeeded,
tx_succeeded AS receipt_succeeded,
_partition_by_block_number
FROM
initial_receipt_full
)
SELECT
*,
{{ dbt_utils.generate_surrogate_key(
['receipt_id']
) }} AS receipts_final_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL
qualify ROW_NUMBER() over (PARTITION BY receipt_id ORDER BY block_id IS NOT NULL DESC, modified_timestamp DESC) = 1
{% endif %}
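-- The dedupe prefers rows that matched a block (block_id IS NOT NULL first), then
-- the most recently modified row, so a receipt seen both in receipts_outcome and as
-- a transaction's initial receipt resolves to the richer record.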

View File

@ -0,0 +1,97 @@
version: 2
models:
- name: silver__receipts_final
description: |-
Table containing transaction receipts for the NEAR blockchain.
tests:
- dbt_utils.recency:
datepart: hour
field: inserted_timestamp
interval: 1
columns:
- name: chunk_hash
description: "{{ doc('chunk_hash') }}"
tests:
- not_null
- name: block_id
description: "{{ doc('block_id') }}"
tests:
- not_null
- name: block_timestamp
description: "{{ doc('block_timestamp') }}"
tests:
- not_null
- name: tx_hash
description: "{{ doc('tx_hash') }}"
tests:
- not_null
- name: receipt_id
description: "{{ doc('receipt_id') }}"
tests:
- not_null
- unique
- name: predecessor_id
description: "{{ doc('predecessor_id') }}"
tests:
- not_null
- name: receiver_id
description: "{{ doc('receiver_id') }}"
tests:
- not_null
- name: receipt_json
description: "{{ doc('receipt_json') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARIANT
- OBJECT
- name: outcome_json
description: "{{ doc('outcome_json') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARIANT
- OBJECT
- name: tx_succeeded
description: "{{ doc('tx_succeeded') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- BOOLEAN
- name: receipt_succeeded
description: "{{ doc('receipt_succeeded') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- BOOLEAN
- name: _partition_by_block_number
description: "{{ doc('_partition_by_block_number') }}"
- name: receipts_final_id
description: "{{ doc('id') }}"
tests:
- not_null
- name: inserted_timestamp
description: "{{ doc('inserted_timestamp') }}"
- name: modified_timestamp
description: "{{ doc('modified_timestamp') }}"
- name: _invocation_id
description: "{{ doc('invocation_id') }}"

View File

@ -0,0 +1,146 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ["dynamic_range_predicate","block_timestamp::date"],
incremental_strategy = 'merge',
merge_exclude_columns = ['inserted_timestamp'],
unique_key = 'tx_hash',
cluster_by = ['block_timestamp::DATE','modified_timestamp::DATE'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_hash,tx_signer,tx_receiver);",
tags = ['scheduled_core', 'core_v2'],
full_refresh = false
) }}
{% if var('NEAR_MIGRATE_ARCHIVE', False) %}
{% if execute %}
{% do log('Migrating transactions ' ~ var('RANGE_START') ~ ' to ' ~ var('RANGE_END'), info=True) %}
{% do log('Invocation ID: ' ~ invocation_id, info=True) %}
{% endif %}
SELECT
chunk_hash,
block_id,
block_timestamp,
tx_hash,
tx_receiver,
tx_signer,
transaction_json,
outcome_json,
OBJECT_CONSTRUCT() AS status_json,
tx_succeeded,
gas_used,
transaction_fee,
attached_gas,
_partition_by_block_number,
transactions_final_id,
inserted_timestamp,
modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{{ ref('_migrate_txs') }}
{% else %}
WITH txs_with_receipts AS (
SELECT
chunk_hash,
origin_block_id AS block_id,
origin_block_timestamp AS block_timestamp,
tx_hash,
response_json :transaction :: variant AS transaction_json,
response_json :transaction_outcome :outcome :: variant AS outcome_json,
response_json :status :: variant AS status_json,
response_json :receipts_outcome :: ARRAY AS receipts_outcome_json,
response_json :status :Failure IS NULL AS tx_succeeded,
partition_key AS _partition_by_block_number
FROM
{{ ref('silver__transactions_v2') }}
{% if is_incremental() %}
WHERE
modified_timestamp >= (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01')
FROM
{{ this }}
)
{% endif %}
),
determine_receipt_gas_burnt AS (
SELECT
tx_hash,
SUM(
ZEROIFNULL(VALUE :outcome :gas_burnt :: INT)
) AS total_gas_burnt_receipts,
SUM(
ZEROIFNULL(VALUE :outcome :tokens_burnt :: INT)
) AS total_tokens_burnt_receipts
FROM
txs_with_receipts,
LATERAL FLATTEN (
input => receipts_outcome_json
)
GROUP BY
1
),
determine_attached_gas AS (
SELECT
tx_hash,
SUM(
VALUE :FunctionCall :gas :: INT
) AS total_attached_gas
FROM
txs_with_receipts,
LATERAL FLATTEN (
input => transaction_json :actions :: ARRAY
)
GROUP BY
1
),
transactions_final AS (
SELECT
chunk_hash,
block_id,
block_timestamp,
t.tx_hash,
transaction_json,
outcome_json,
status_json,
total_gas_burnt_receipts,
total_tokens_burnt_receipts,
total_attached_gas,
tx_succeeded,
_partition_by_block_number
FROM
txs_with_receipts t
LEFT JOIN determine_receipt_gas_burnt d USING (tx_hash)
LEFT JOIN determine_attached_gas A USING (tx_hash)
)
SELECT
chunk_hash,
block_id,
block_timestamp,
tx_hash,
transaction_json :receiver_id :: STRING AS tx_receiver,
transaction_json :signer_id :: STRING AS tx_signer,
transaction_json,
outcome_json,
status_json,
tx_succeeded,
ZEROIFNULL(outcome_json :gas_burnt :: INT) + total_gas_burnt_receipts AS gas_used,
ZEROIFNULL(outcome_json :tokens_burnt :: INT) + total_tokens_burnt_receipts AS transaction_fee,
COALESCE(
total_attached_gas,
gas_used
) AS attached_gas,
_partition_by_block_number,
{{ dbt_utils.generate_surrogate_key(
['tx_hash']
) }} AS transactions_final_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
transactions_final
{% endif %}
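-- gas_used and transaction_fee add the transaction outcome's own gas_burnt /
-- tokens_burnt to the totals burnt across all receipt outcomes; attached_gas falls
-- back to gas_used when no FunctionCall action declared an explicit gas amount.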

View File

@ -0,0 +1,119 @@
version: 2
models:
- name: silver__transactions_final
description: |-
Table containing finalized transactions for the NEAR blockchain.
tests:
- dbt_utils.recency:
datepart: hour
field: inserted_timestamp
interval: 1
columns:
- name: chunk_hash
description: "{{ doc('chunk_hash') }}"
tests:
- not_null
- name: block_id
description: "{{ doc('block_id') }}"
tests:
- not_null
- name: block_timestamp
description: "{{ doc('block_timestamp') }}"
tests:
- not_null
- name: tx_hash
description: "{{ doc('tx_hash') }}"
tests:
- not_null
- unique
- name: tx_receiver
description: "{{ doc('tx_receiver') }}"
tests:
- not_null
- name: tx_signer
description: "{{ doc('tx_signer') }}"
tests:
- not_null
- name: transaction_json
description: "{{ doc('transaction_json') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARIANT
- OBJECT
- name: outcome_json
description: "{{ doc('outcome_json') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARIANT
- OBJECT
- name: status_json
description: "{{ doc('status_json') }}"
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- VARIANT
- OBJECT
- name: tx_succeeded
description: "{{ doc('tx_succeeded') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- BOOLEAN
- name: gas_used
description: "{{ doc('gas_used') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: transaction_fee
description: "{{ doc('transaction_fee') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: attached_gas
description: "{{ doc('attached_gas') }}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _partition_by_block_number
description: "{{ doc('_partition_by_block_number') }}"
- name: transactions_final_id
description: "{{ doc('id') }}"
tests:
- not_null
- name: inserted_timestamp
description: "{{ doc('inserted_timestamp') }}"
- name: modified_timestamp
description: "{{ doc('modified_timestamp') }}"
- name: _invocation_id
description: "{{ doc('invocation_id') }}"

View File

@ -3,18 +3,21 @@
{{ config (
materialized = "incremental",
incremental_strategy = 'merge',
incremental_predicates = ["dynamic_range_predicate","block_timestamp::date"],
incremental_predicates = ["dynamic_range_predicate","origin_block_timestamp::date"],
unique_key = "tx_hash",
cluster_by = ['modified_timestamp::DATE','partition_key'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(tx_hash)",
cluster_by = ['modified_timestamp::DATE','origin_block_timestamp::date'],
tags = ['scheduled_core', 'core_v2']
) }}
WITH bronze_transactions AS (
SELECT
VALUE :BLOCK_ID :: INT AS block_id,
VALUE :BLOCK_TIMESTAMP_EPOCH :: INT AS block_timestamp_epoch,
VALUE :BLOCK_ID :: INT AS origin_block_id,
VALUE :BLOCK_TIMESTAMP_EPOCH :: INT AS origin_block_timestamp_epoch,
VALUE :SHARD_ID :: INT AS shard_id,
VALUE :CHUNK_HASH :: STRING AS chunk_hash,
VALUE :HEIGHT_CREATED :: INT AS chunk_height_created,
VALUE :HEIGHT_INCLUDED :: INT AS chunk_height_included,
DATA :transaction :hash :: STRING AS tx_hash,
DATA :transaction :signer_id :: STRING AS signer_id,
partition_key,
@ -38,9 +41,13 @@ WHERE
{% endif %}
)
SELECT
block_id,
block_timestamp_epoch,
TO_TIMESTAMP_NTZ(block_timestamp_epoch, 9) AS block_timestamp,
origin_block_id,
origin_block_timestamp_epoch,
TO_TIMESTAMP_NTZ(origin_block_timestamp_epoch, 9) AS origin_block_timestamp,
shard_id,
chunk_hash,
chunk_height_created,
chunk_height_included,
tx_hash,
signer_id,
partition_key,

View File

@ -1,258 +0,0 @@
{{ config(
materialized = "incremental",
unique_key = "swap_id",
incremental_strategy = "merge",
merge_exclude_columns = ["inserted_timestamp"],
cluster_by = ["block_timestamp::DATE"],
tags = ['curated','scheduled_non_core'],
enabled = False
) }}
{# DEPRECATED JANUARY 2024 #}
WITH base_swap_calls AS (
SELECT
block_id,
block_timestamp,
tx_hash,
action_id,
args,
_inserted_timestamp,
method_name
FROM
{{ ref('silver__actions_events_function_call_s3') }}
WHERE
method_name IN (
'swap',
'ft_transfer_call'
) {% if var("MANUAL_FIX") %}
AND {{ partition_load_manual('no_buffer') }}
{% else %}
AND {{ incremental_load_filter('_inserted_timestamp') }}
{% endif %}
),
base_swaps AS (
SELECT
block_id,
block_timestamp,
tx_hash,
action_id,
IFF(
method_name = 'ft_transfer_call',
TRY_PARSE_JSON(TRY_PARSE_JSON(args) :msg),
TRY_PARSE_JSON(args)
) :actions AS actions,
_inserted_timestamp
FROM
base_swap_calls
),
agg_swaps AS (
SELECT
tx_hash,
ANY_VALUE(block_id) AS block_id,
ANY_VALUE(block_timestamp) AS block_timestamp,
ARRAY_AGG(
action.value
) within GROUP (
ORDER BY
action_id,
action.index
) AS action_list,
ANY_VALUE(_inserted_timestamp) AS _inserted_timestamp
FROM
base_swaps,
LATERAL FLATTEN(
input => actions
) action
GROUP BY
1
),
actions AS (
SELECT
block_id,
block_timestamp,
tx_hash,
NULLIF(
action.value :pool_id,
NULL
) AS pool_id,
NULLIF(
action.value :token_in,
NULL
) :: text AS token_in,
NULLIF(
action.value :token_out,
NULL
) :: text AS token_out,
action.index AS swap_index,
_inserted_timestamp
FROM
agg_swaps,
LATERAL FLATTEN(
input => action_list
) action
WHERE
NOT RLIKE(
pool_id,
'.*[a-z].*',
'i'
)
),
receipts AS (
SELECT
block_id,
tx_hash,
-- TODO use the receipt succeeded column here
CASE
WHEN PARSE_JSON(
r.status_value
) :Failure IS NOT NULL THEN 'Fail'
ELSE 'Success'
END AS success_or_fail,
logs
FROM
{{ ref("silver__streamline_receipts_final") }}
r
WHERE
tx_hash IN (
SELECT
tx_hash
FROM
actions
)
),
flat_receipts AS (
SELECT
tx_hash,
l.value,
l.index,
success_or_fail
FROM
receipts,
LATERAL FLATTEN(
input => logs
) l
),
swap_logs AS (
SELECT
tx_hash,
ROW_NUMBER() over (
PARTITION BY tx_hash
ORDER BY
INDEX ASC
) - 1 AS swap_index,
VALUE,
success_or_fail
FROM
flat_receipts
WHERE
VALUE LIKE 'Swapped%'
),
transactions AS (
SELECT
block_id,
block_timestamp,
tx_hash,
tx_signer,
tx_receiver
FROM
{{ ref("silver__streamline_transactions_final") }}
WHERE
tx_hash IN (
SELECT
tx_hash
FROM
actions
)
),
token_labels AS (
SELECT
*
FROM
{{ ref("silver__token_labels") }}
),
final_table AS (
SELECT
swap_logs.swap_index,
actions._inserted_timestamp,
actions.block_id,
actions.block_timestamp,
swap_logs.tx_hash,
CONCAT(
swap_logs.tx_hash,
'-',
swap_logs.swap_index
) AS swap_id,
swap_logs.value AS log_data,
transactions.tx_signer AS trader,
transactions.tx_receiver AS platform,
LAST_VALUE(
swap_logs.success_or_fail
) over (
PARTITION BY swap_logs.tx_hash
ORDER BY
swap_logs.success_or_fail DESC
) AS txn_status,
actions.pool_id :: INT AS pool_id,
actions.token_in,
actions.token_out
FROM
actions
INNER JOIN swap_logs
ON (
swap_logs.tx_hash = actions.tx_hash
AND swap_logs.swap_index = actions.swap_index
)
JOIN transactions
ON actions.tx_hash = transactions.tx_hash
),
FINAL AS (
SELECT
block_id,
block_timestamp,
tx_hash,
swap_id,
platform,
trader,
pool_id,
token_in,
token_labels_in.symbol AS token_in_symbol,
REGEXP_SUBSTR(
log_data,
'Swapped (\\d+)',
1,
1,
'e'
) :: NUMBER AS amount_in_raw,
amount_in_raw / pow(10, IFNULL(token_labels_in.decimals, 0)) AS amount_in,
token_out,
token_labels_out.symbol AS token_out_symbol,
REGEXP_SUBSTR(
log_data,
'Swapped \\d+ .+ for (\\d+)',
1,
1,
'e'
) :: NUMBER AS amount_out_raw,
amount_out_raw / pow(10, IFNULL(token_labels_out.decimals, 0)) AS amount_out,
swap_index,
_inserted_timestamp
FROM
final_table
LEFT JOIN token_labels AS token_labels_in
ON final_table.token_in = token_labels_in.token_contract
LEFT JOIN token_labels AS token_labels_out
ON final_table.token_out = token_labels_out.token_contract
WHERE
txn_status = 'Success'
AND log_data IS NOT NULL
)
SELECT
*,
{{ dbt_utils.generate_surrogate_key(
['swap_id']
) }} AS dex_swaps_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL
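-- Snowflake's REGEXP_SUBSTR 'e' parameter returns the capture group rather than
-- the whole match, so amount_in_raw / amount_out_raw extract the numeric tokens
-- from logs shaped like 'Swapped <amount_in> <token_in> for <amount_out> <token_out>'.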

View File

@ -1,72 +0,0 @@
version: 2
models:
- name: silver__dex_swaps_s3
description: |-
This table records all swap transactions occurring on NEAR. This model is deprecated as of January 2024 and will remain live through February so users can migrate to the new model.
Its logic is outdated and inaccurate.
columns:
- name: BLOCK_ID
description: "{{ doc('block_id')}}"
- name: BLOCK_TIMESTAMP
description: "{{ doc('block_timestamp')}}"
tests:
- not_null:
where: _inserted_timestamp <= CURRENT_TIMESTAMP - interval '1 hour'
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_HASH
description: "{{ doc('tx_hash')}}"
tests:
- not_null:
where: _inserted_timestamp <= CURRENT_TIMESTAMP - interval '1 hour'
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: SWAP_ID
description: "{{ doc('swap_id')}}"
- name: PLATFORM
description: "{{ doc('platform')}}"
- name: TRADER
description: "{{ doc('trader')}}"
- name: POOL_ID
description: "{{ doc('pool_id')}}"
- name: TOKEN_IN
description: "{{ doc('token_in')}}"
- name: AMOUNT_IN
description: "{{ doc('amount_in')}}"
- name: TOKEN_OUT
description: "{{ doc('token_out')}}"
- name: AMOUNT_OUT
description: "{{ doc('amount_out')}}"
- name: SWAP_INDEX
description: "{{ doc('swap_index')}}"
- name: _INSERTED_TIMESTAMP
description: "{{ doc('_inserted_timestamp')}}"
- name: DEX_SWAPS_ID
description: "{{doc('id')}}"
- name: INSERTED_TIMESTAMP
description: "{{doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{doc('modified_timestamp')}}"
- name: _INVOCATION_ID
description: "{{doc('invocation_id')}}"

View File

@ -10,7 +10,7 @@
) }}
{# Note - multisource model #}
-- depends on {{ ref('silver__logs_s3') }}
-- depends on {{ ref('silver__streamline_receipts_final') }}
-- depends on {{ ref('silver__receipts_final') }}
{% if execute %}
@ -49,7 +49,7 @@
SELECT
MIN(block_timestamp) block_timestamp
FROM
{{ ref('silver__streamline_receipts_final') }} A
{{ ref('silver__receipts_final') }} A
WHERE
modified_timestamp >= '{{max_mod}}'
)
@ -78,7 +78,6 @@ WITH swap_logs AS (
log_index,
clean_log,
_partition_by_block_number,
_inserted_timestamp,
modified_timestamp
FROM
{{ ref('silver__logs_s3') }}
@ -97,15 +96,14 @@ WITH swap_logs AS (
),
receipts AS (
SELECT
receipt_object_id,
receipt_actions,
receipt_id AS receipt_object_id,
receipt_json AS receipt_actions,
receiver_id,
signer_id,
receipt_json :receipt :Action :signer_id :: STRING AS signer_id,
_partition_by_block_number,
_inserted_timestamp,
modified_timestamp
FROM
{{ ref('silver__streamline_receipts_final') }}
{{ ref('silver__receipts_final') }}
WHERE
receipt_object_id IN (
SELECT
@ -157,7 +155,6 @@ swap_outcome AS (
'\\1'
) :: STRING AS token_out,
_partition_by_block_number,
_inserted_timestamp,
modified_timestamp
FROM
swap_logs
@ -212,8 +209,7 @@ parse_actions AS (
) AS swap_input_data,
r.receiver_id AS receipt_receiver_id,
r.signer_id AS receipt_signer_id,
o._partition_by_block_number,
o._inserted_timestamp
o._partition_by_block_number
FROM
swap_outcome o
LEFT JOIN receipts r USING (receipt_object_id)
@ -241,8 +237,7 @@ FINAL AS (
token_in,
swap_input_data,
LOG,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
parse_actions
)

View File

@ -53,15 +53,15 @@ WITH actions_events AS (
status_value AS (
SELECT
tx_hash,
status_value,
TRY_PARSE_JSON(REPLACE(LOGS[0] :: STRING, 'EVENT_JSON:', '')) AS event,
PARSE_JSON(BASE64_DECODE_STRING(status_value:SuccessValue)) as SuccessValue,
outcome_json :outcome :status AS status_value,
TRY_PARSE_JSON(REPLACE(outcome_json :outcome :logs[0] :: STRING, 'EVENT_JSON:', '')) AS event,
PARSE_JSON(BASE64_DECODE_STRING(outcome_json :outcome :status :SuccessValue)) as SuccessValue,
_partition_by_block_number,
_inserted_timestamp
inserted_timestamp AS _inserted_timestamp
FROM
{{ ref('silver__streamline_receipts_final') }}
{{ ref('silver__receipts_final') }}
WHERE
receipt_actions:predecessor_id = 'marketplace.paras.near'
predecessor_id = 'marketplace.paras.near'
AND
event:event = 'nft_transfer'

View File

@ -47,10 +47,9 @@ tx AS (
tx_signer,
tx_receiver,
tx_succeeded,
tx_status, -- TODO deprecate col
transaction_fee
FROM
{{ ref('silver__streamline_transactions_final') }}
{{ ref('silver__transactions_final') }}
{% if var("MANUAL_FIX") %}
WHERE {{ partition_load_manual('no_buffer') }}
{% else %}
@ -214,7 +213,6 @@ mint_tx AS (
tx_signer,
tx_receiver,
tx_succeeded,
tx_status,
transaction_fee
FROM
tx
@ -239,7 +237,6 @@ FINAL AS (
mint_tx.tx_signer AS tx_signer,
mint_tx.tx_receiver AS tx_receiver,
mint_tx.tx_succeeded AS tx_succeeded,
mint_tx.tx_status AS tx_status,
mint_events.receipt_object_id,
mint_events.receiver_id,
mint_events.signer_id,

View File

@ -16,9 +16,9 @@ WITH txs AS (
tx_hash,
tx_succeeded,
_partition_by_block_number,
_inserted_timestamp
inserted_timestamp AS _inserted_timestamp
FROM
{{ ref('silver__streamline_transactions_final') }}
{{ ref('silver__transactions_final') }}
{% if var("MANUAL_FIX") %}
WHERE {{ partition_load_manual('no_buffer') }}

View File

@ -15,17 +15,16 @@ WITH receipts AS (
block_id,
block_timestamp,
tx_hash,
receipt_object_id,
logs,
receipt_id,
outcome_json :outcome :logs AS logs,
receiver_id,
receipt_actions :predecessor_id :: STRING AS predecessor_id, -- TODO once exists in receipts final can select directly
signer_id,
gas_burnt,
predecessor_id,
receipt_json :receipt :Action :signer_id :: STRING AS signer_id,
outcome_json :outcome :gas_burnt AS gas_burnt,
receipt_succeeded,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
{{ ref('silver__streamline_receipts_final') }}
{{ ref('silver__receipts_final') }}
{% if var("MANUAL_FIX") %}
WHERE {{ partition_load_manual('no_buffer') }}
@ -45,10 +44,10 @@ FINAL AS (
block_id,
block_timestamp,
tx_hash,
receipt_object_id,
receipt_id,
concat_ws(
'-',
receipt_object_id,
receipt_id,
INDEX
) AS log_id,
INDEX AS log_index,
@ -63,8 +62,7 @@ FINAL AS (
VALUE ILIKE 'event_json:%' AS is_standard,
gas_burnt,
receipt_succeeded,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
receipts,
LATERAL FLATTEN(
@ -73,6 +71,7 @@ FINAL AS (
)
SELECT
*,
receipt_id AS receipt_object_id, -- keep for one run, then copy values over and drop the column
{{ dbt_utils.generate_surrogate_key(
['log_id']
) }} AS logs_id,

View File

@ -12,16 +12,16 @@ WITH receipts AS (
tx_hash,
block_id,
block_timestamp,
receipt_object_id,
receipt_id AS receipt_object_id,
receiver_id,
signer_id,
receipt_actions :predecessor_id :: STRING AS predecessor_id,
status_value,
logs,
_inserted_timestamp,
receipt_json :receipt :Action :signer_id :: STRING AS signer_id,
predecessor_id,
receipt_json AS receipt_actions,
outcome_json :outcome :status :: VARIANT AS status_value,
outcome_json :outcome :logs :: ARRAY AS logs,
_partition_by_block_number
FROM
{{ ref('silver__streamline_receipts_final') }}
{{ ref('silver__receipts_final') }}
WHERE
receipt_succeeded
{% if var("MANUAL_FIX") %}
@ -48,8 +48,7 @@ FINAL AS (
status_value,
logs,
VALUE AS LOG,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
receipts,
LATERAL FLATTEN(logs)

View File

@ -13,11 +13,18 @@ WITH pool_events AS (
*
FROM
{{ ref('silver__pool_events') }}
WHERE
{% if var("MANUAL_FIX") %}
WHERE
{{ partition_load_manual('no_buffer') }}
{% else %}
{{ incremental_load_filter('_inserted_timestamp') }}
WHERE
modified_timestamp >= (
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
)
{% endif %}
AND LOG LIKE 'Epoch%'
),

View File

@ -17,12 +17,10 @@ WITH txs AS (
block_id,
tx_signer,
tx_receiver,
tx,
tx_status,
_partition_by_block_number,
_inserted_timestamp
transaction_json AS tx,
_partition_by_block_number
FROM
{{ ref('silver__streamline_transactions_final') }}
{{ ref('silver__transactions_final') }}
{% if var("MANUAL_FIX") %}
WHERE {{ partition_load_manual('no_buffer') }}
@ -48,8 +46,7 @@ function_calls AS (
signer_id,
method_name,
args,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
{{ ref('silver__actions_events_function_call_s3') }}
WHERE
@ -84,9 +81,7 @@ add_addresses_from_tx AS (
signer_id,
method_name,
args,
tx_status,
txs._partition_by_block_number,
txs._inserted_timestamp
txs._partition_by_block_number
FROM
function_calls fc
LEFT JOIN txs USING (tx_hash)
@ -102,8 +97,7 @@ new_pools AS (
args :reward_fee_fraction
) AS reward_fee_fraction,
'Create' AS tx_type,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
add_addresses_from_tx
WHERE
@ -128,8 +122,7 @@ updated_pools AS (
args :reward_fee_fraction
) AS reward_fee_fraction,
'Update' AS tx_type,
_partition_by_block_number,
_inserted_timestamp
_partition_by_block_number
FROM
add_addresses_from_tx
WHERE

View File

@ -1,124 +0,0 @@
{{ config(
materialized = 'incremental',
merge_exclude_columns = ["inserted_timestamp"],
unique_key = 'action_id_horizon',
cluster_by = ['_inserted_timestamp::date', '_partition_by_block_number'],
tags = ['curated', 'horizon','scheduled_non_core']
) }}
{# Note - multisource model #}
-- TODO ez_actions refactor
WITH all_horizon_receipts AS (
SELECT
tx_hash,
receipt_object_id,
receiver_id,
signer_id,
receipt_succeeded,
logs,
_partition_by_block_number,
_inserted_timestamp
FROM
{{ ref('silver_horizon__receipts') }}
{% if var("MANUAL_FIX") %}
WHERE {{ partition_load_manual('no_buffer') }}
{% else %}
{% if is_incremental() %}
WHERE modified_timestamp >= (
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
)
{% endif %}
{% endif %}
),
decoded_function_calls AS (
SELECT
SPLIT(
action_id,
'-'
) [0] :: STRING AS receipt_object_id,
action_id,
tx_hash,
block_id,
block_timestamp,
method_name,
args,
deposit,
attached_gas,
_partition_by_block_number,
_inserted_timestamp
FROM
{{ ref('silver__actions_events_function_call_s3') }}
WHERE
_partition_by_block_number >= 85000000
AND SPLIT(
action_id,
'-'
) [0] :: STRING IN (
SELECT
DISTINCT receipt_object_id
FROM
all_horizon_receipts
)
{% if var("MANUAL_FIX") %}
AND {{ partition_load_manual('no_buffer') }}
{% else %}
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
)
{% endif %}
{% endif %}
),
FINAL AS (
SELECT
fc.action_id,
fc.tx_hash,
r.receipt_object_id,
fc.block_id,
fc.block_timestamp,
fc.method_name,
fc.args,
fc.deposit,
fc.attached_gas,
r.receiver_id,
r.signer_id,
r.receipt_succeeded,
r.logs,
fc._partition_by_block_number,
fc._inserted_timestamp
FROM
decoded_function_calls fc
LEFT JOIN all_horizon_receipts r USING (receipt_object_id)
)
SELECT
action_id AS action_id_horizon,
tx_hash,
receipt_object_id,
block_id,
block_timestamp,
method_name,
args,
deposit,
attached_gas,
receiver_id,
signer_id,
receipt_succeeded,
_partition_by_block_number,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['action_id_horizon']
) }} AS horizon_decoded_actions_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL

View File

@ -1,80 +0,0 @@
version: 2
models:
- name: silver_horizon__decoded_actions
description: |-
Decoded FunctionCall events for receipts where the contract nearhorizon.near was called.
columns:
- name: action_id_horizon
description: "{{ doc('action_id')}}"
tests:
- unique
- name: tx_hash
description: "{{ doc('tx_hash')}}"
tests:
- not_null:
where: _inserted_timestamp <= current_timestamp - interval '1 hour'
- name: receipt_object_id
description: "{{ doc('receipt_object_id')}}"
- name: block_id
description: "{{ doc('block_id')}}"
- name: block_timestamp
description: "{{ doc('block_timestamp')}}"
tests:
- not_null:
where: _inserted_timestamp <= current_timestamp - interval '1 hour'
- name: method_name
description: "{{ doc('method_name')}}"
tests:
- not_null
- name: args
description: "{{ doc('args')}}"
tests:
- not_null
- name: deposit
description: "{{ doc('deposit')}}"
- name: attached_gas
description: "{{ doc('attached_gas')}}"
- name: receiver_id
description: "{{ doc('receiver_id')}}"
tests:
- not_null
- name: signer_id
description: "{{ doc('signer_id')}}"
tests:
- not_null
- name: receipt_succeeded
description: "{{ doc('receipt_succeeded')}}"
- name: _partition_by_block_number
description: "{{ doc('_partition_by_block_number')}}"
- name: _INSERTED_TIMESTAMP
description: "{{ doc('_inserted_timestamp')}}"
- name: HORIZON_DECODED_ACTIONS_ID
description: "{{doc('id')}}"
- name: INSERTED_TIMESTAMP
description: "{{doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{doc('modified_timestamp')}}"
- name: _INVOCATION_ID
description: "{{doc('invocation_id')}}"

View File

@ -1,64 +0,0 @@
{{ config(
materialized = 'incremental',
merge_exclude_columns = ["inserted_timestamp"],
unique_key = 'receipt_object_id',
cluster_by = ['_inserted_timestamp::date', 'block_timestamp::DATE'],
tags = ['curated', 'horizon','scheduled_non_core']
) }}
WITH all_horizon_receipts AS (
SELECT
tx_hash,
receipt_object_id,
block_id,
block_timestamp,
receipt_index,
chunk_hash,
receipt_actions,
execution_outcome,
receipt_outcome_id,
receiver_id,
signer_id,
receipt_type,
gas_burnt,
status_value,
receipt_succeeded,
logs,
proof,
metadata,
_partition_by_block_number,
_inserted_timestamp
FROM
{{ ref('silver__streamline_receipts_final') }}
WHERE (
LOWER(signer_id) = 'nearhorizon.near'
OR LOWER(receiver_id) = 'nearhorizon.near'
)
AND _partition_by_block_number >= 86000000
{% if var("MANUAL_FIX") %}
AND {{ partition_load_manual('no_buffer') }}
{% else %}
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
)
{% endif %}
{% endif %}
)
SELECT
*,
{{ dbt_utils.generate_surrogate_key(
['receipt_object_id']
) }} AS horizon_receipts_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
all_horizon_receipts

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_horizon__receipts
description: |-
Filtered receipts where the signer or receiver is the contract nearhorizon.near.
columns:
- name: tx_hash
description: "{{ doc('tx_hash')}}"
- name: block_id
description: "{{ doc('block_id')}}"
- name: receipt_index
description: "{{ doc('receipt_index')}}"
- name: chunk_hash
description: "{{ doc('chunk_hash')}}"
- name: receipt_actions
description: "{{ doc('receipt')}}"
- name: execution_outcome
description: "{{ doc('execution_outcome')}}"
- name: receipt_object_id
description: "{{ doc('receipt_object_id')}}"
tests:
- unique
- name: receipt_outcome_id
description: "{{ doc('receipt_outcome_id')}}"
- name: receiver_id
description: "{{ doc('receiver_id')}}"
- name: signer_id
description: "{{ doc('signer_id')}}"
- name: receipt_type
description: "{{ doc('receipt_type')}}"
- name: gas_burnt
description: "{{ doc('gas_burnt')}}"
- name: status_value
description: "{{ doc('status_value')}}"
- name: receipt_succeeded
description: "{{ doc('receipt_succeeded')}}"
- name: logs
description: "{{ doc('logs')}}"
- name: proof
description: "{{ doc('proof')}}"
- name: metadata
description: "{{ doc('metadata')}}"
- name: _partition_by_block_number
description: "{{ doc('_partition_by_block_number')}}"
- name: _INSERTED_TIMESTAMP
description: "{{ doc('_inserted_timestamp')}}"
- name: HORIZON_RECEIPTS_ID
description: "{{doc('id')}}"
- name: INSERTED_TIMESTAMP
description: "{{doc('inserted_timestamp')}}"
- name: MODIFIED_TIMESTAMP
description: "{{doc('modified_timestamp')}}"
- name: _INVOCATION_ID
description: "{{doc('invocation_id')}}"

View File

@ -12,12 +12,12 @@
WITH receipts AS (
SELECT
receipt_object_id,
signer_id,
receipt_id AS receipt_object_id,
receipt_json :receipt :Action :signer_id :: STRING AS signer_id,
_partition_by_block_number,
_inserted_timestamp
inserted_timestamp AS _inserted_timestamp
FROM
{{ ref('silver__streamline_receipts_final') }}
{{ ref('silver__receipts_final') }}
WHERE
_partition_by_block_number >= 59670000

View File

@ -2,7 +2,7 @@
materialized = 'incremental',
merge_exclude_columns = ["inserted_timestamp"],
unique_key = 'receipt_object_id',
cluster_by = ['_inserted_timestamp::date', '_partition_by_block_number'],
cluster_by = ['modified_timestamp::date', '_partition_by_block_number'],
tags = ['curated', 'social','scheduled_non_core']
) }}
@ -10,27 +10,26 @@ WITH all_social_receipts AS (
SELECT
tx_hash,
receipt_object_id,
receipt_id AS receipt_object_id,
block_id,
block_timestamp,
receipt_index,
NULL AS receipt_index,
chunk_hash,
receipt_actions,
execution_outcome,
receipt_outcome_id,
receipt_json AS receipt_actions,
outcome_json AS execution_outcome,
outcome_json :outcome :receipt_ids :: ARRAY AS receipt_outcome_id,
receiver_id,
receipt_actions :predecessor_id :: STRING AS predecessor_id,
signer_id,
receipt_type,
gas_burnt,
status_value,
logs,
proof,
metadata,
_partition_by_block_number,
_inserted_timestamp
predecessor_id,
receipt_json :receipt :Action :signer_id :: STRING AS signer_id,
NULL AS receipt_type,
outcome_json :outcome :gas_burnt :: NUMBER AS gas_burnt,
outcome_json :outcome :status :: VARIANT AS status_value,
outcome_json :outcome :logs :: ARRAY AS logs,
outcome_json :proof :: ARRAY AS proof,
outcome_json :outcome :metadata :: VARIANT AS metadata,
_partition_by_block_number
FROM
{{ ref('silver__streamline_receipts_final') }}
{{ ref('silver__receipts_final') }}
WHERE
(
LOWER(signer_id) = 'social.near'

View File

@ -3,7 +3,7 @@
incremental_strategy = 'delete+insert',
unique_key = "block_timestamp_hour",
cluster_by = ['block_timestamp_hour::DATE'],
tags = ['curated','scheduled_non_core']
tags = ['stats','scheduled_non_core']
) }}
/* compute the incremental timestamp value first, then use it as a static value */
{% if execute %}
@ -14,11 +14,11 @@
SELECT
MIN(DATE_TRUNC('hour', block_timestamp)) block_timestamp_hour
FROM
{{ ref('silver__streamline_blocks') }} -- Streamline Migration TODO - change this to fact blocks once table
{{ ref('silver__blocks_final') }}
WHERE
_inserted_timestamp >= (
modified_timestamp >= (
SELECT
MAX(_inserted_timestamp)
MAX(modified_timestamp)
FROM
{{ this }}
) {% endset %}
@ -35,7 +35,7 @@ SELECT
COUNT(
1
) AS block_count,
MAX(_inserted_timestamp) AS _inserted_timestamp,
MAX(inserted_timestamp) AS _inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_timestamp_hour']
) }} AS core_metrics_block_hourly_id,
@ -43,7 +43,7 @@ SELECT
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{{ ref('silver__streamline_blocks') }}
{{ ref('silver__blocks_final') }}
WHERE
block_timestamp_hour < DATE_TRUNC(
'hour',

View File

@ -3,7 +3,7 @@
incremental_strategy = 'delete+insert',
unique_key = "block_timestamp_hour",
cluster_by = ['block_timestamp_hour::DATE'],
tags = ['curated','scheduled_non_core']
tags = ['stats','scheduled_non_core']
) }}
/* compute the incremental timestamp value first, then use it as a static value */
{% if execute %}
@ -14,11 +14,11 @@
SELECT
MIN(DATE_TRUNC('hour', block_timestamp)) block_timestamp_hour
FROM
{{ ref('silver__streamline_transactions_final') }} -- Streamline Migration TODO - change this to fact transactions once table
{{ ref('silver__transactions_final') }}
WHERE
_inserted_timestamp >= (
modified_timestamp >= (
SELECT
MAX(_inserted_timestamp)
MAX(modified_timestamp)
FROM
{{ this }}
) {% endset %}
@ -50,7 +50,7 @@ SELECT
DISTINCT tx_receiver
) AS unique_to_count,
SUM(transaction_fee / pow(10, 24)) AS total_fees,
MAX(_inserted_timestamp) AS _inserted_timestamp,
MAX(inserted_timestamp) AS _inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_timestamp_hour']
) }} AS core_metrics_hourly_id,
@ -58,7 +58,7 @@ SELECT
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{{ ref('silver__streamline_transactions_final') }} -- Streamline Migration TODO - change this to fact transactions once table
{{ ref('silver__transactions_final') }}
WHERE
block_timestamp_hour < DATE_TRUNC(
'hour',

Some files were not shown because too many files have changed in this diff