Merge remote-tracking branch 'origin/main' into AN-6236-Kitty

This commit is contained in:
stanz 2025-09-15 21:53:12 +07:00
commit e98a14da16
76 changed files with 3319 additions and 80 deletions

View File

@ -0,0 +1,53 @@
---
# Scheduled dbt build of the EVM testnet models (tag:evm_testnet).
name: dbt_run_evm_testnet
run-name: dbt_run_evm_testnet

on:
  workflow_dispatch:
  schedule:
    # Every 4 hours at 30 minutes past the hour
    - cron: "30 */4 * * *"

# Snowflake connection settings come from repo/environment vars and secrets.
env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

# Only one run of this workflow at a time.
concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Update evm tables
        run: |
          dbt run -s tag:evm_testnet

  # Reusable Slack alert, invoked only when the dbt job fails.
  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.EVM_SLACK_WEBHOOK_URL }}

View File

@ -4,7 +4,7 @@ run-name: dbt_run_scheduled_streamline_non_core
on:
workflow_dispatch:
schedule:
# Daily at 06:00 UTC
# Once daily at 06:00 UTC
- cron: "0 6 * * *"
env:
@ -45,14 +45,15 @@ jobs:
run: >
dbt run -s tag:streamline_non_core
- name: Test DBT Models
run: >
dbt test -s tag:streamline_non_core
continue-on-error: true
# Temporarily disabled 2025-08-29 during Snag API issue triage
# - name: Test DBT Models
# run: >
# dbt test -s tag:streamline_non_core
# continue-on-error: true
- name: Log test results
run: |
python python/dbt_test_alert.py
# - name: Log test results
# run: |
# python python/dbt_test_alert.py
- name: Store logs
uses: actions/upload-artifact@v4

View File

@ -0,0 +1,53 @@
---
# Scheduled dbt run that triggers the Streamline realtime ingestion path
# for EVM testnet (selector: 2+tag:streamline_evm_testnet_realtime).
name: dbt_run_streamline_evm_testnet_realtime
run-name: dbt_run_streamline_evm_testnet_realtime

on:
  workflow_dispatch:
  schedule:
    # Every 4 hours
    - cron: "0 */4 * * *"

# Snowflake connection settings come from repo/environment vars and secrets.
env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

# Only one run of this workflow at a time.
concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - name: install dependencies
        run: |
          pip install -r requirements.txt
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -s 2+tag:streamline_evm_testnet_realtime

  # Reusable Slack alert, invoked only when the dbt job fails.
  notify-failure:
    needs: [run_dbt_jobs]
    if: failure()
    uses: ./.github/workflows/slack_notify.yml
    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.EVM_SLACK_WEBHOOK_URL }}

View File

@ -6,8 +6,8 @@ description: |
on:
workflow_dispatch:
schedule:
# Runs hourly
- cron: "0 * * * *"
# Runs every 6 minutes
- cron: "0,6,12,18,24,30,36,42,48,54 * * * *"
env:
USE_VARS: "${{ vars.USE_VARS }}"

View File

@ -45,7 +45,7 @@ jobs:
- name: Run DBT Jobs
run: |
dbt test -s "flow_models,models/gold" tag:observability tag:evm_gap_test --vars '{"TEST_RANGE": True, "START_HEIGHT": ${{ vars.STREAMLINE_START_BLOCK }}, "END_HEIGHT": 100000000}'
dbt test -s "flow_models,models/gold" tag:observability tag:evm_gap_test --exclude "flow_models,models/gold/rewards" --vars '{"TEST_RANGE": True, "START_HEIGHT": ${{ vars.STREAMLINE_START_BLOCK }}, "END_HEIGHT": 100000000}'
continue-on-error: true
- name: Log test results

4
macros/dbt/get_merge.sql Normal file
View File

@ -0,0 +1,4 @@
{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}
{# Project-level override of dbt's built-in get_merge_sql hook.
   Delegates MERGE statement generation for incremental models entirely to
   the fsc_utils package implementation and returns its SQL unchanged. #}
{% set merge_sql = fsc_utils.get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) %}
{{ return(merge_sql) }}
{% endmacro %}

View File

@ -41,8 +41,8 @@
{% do run_query(sql) %}
{% set sql %}
CREATE api integration IF NOT EXISTS aws_flow_evm_api_dev api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::704693948482:role/flow-api-stg-rolesnowflakeudfsAF733095-tPEdygwPC6IV' api_allowed_prefixes = (
'https://pfv9lhg3kg.execute-api.us-east-1.amazonaws.com/stg/'
CREATE api integration IF NOT EXISTS aws_flow_evm_api_dev api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::704693948482:role/flow-api-stg-rolesnowflakeudfsAF733095-ybejBONVMTd4' api_allowed_prefixes = (
'https://2hcu4hei27.execute-api.us-east-1.amazonaws.com/stg/'
) enabled = TRUE;
{% endset %}
{% do run_query(sql) %}

View File

@ -88,7 +88,7 @@
{% if target.name == "prod" %}
aws_flow_evm_api_prod AS 'https://rajpkbgko9.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_rest_api'
{% else %}
aws_flow_evm_api_dev AS 'https://pfv9lhg3kg.execute-api.us-east-1.amazonaws.com/stg/udf_bulk_rest_api'
aws_flow_evm_api_dev AS 'https://2hcu4hei27.execute-api.us-east-1.amazonaws.com/stg/udf_bulk_rest_api'
{%- endif %};
{% endmacro %}

View File

@ -1,23 +0,0 @@
{{ config (
materialized = 'view',
tags = ['scheduled']
) }}
SELECT
record_id,
offset_id,
block_id,
block_timestamp,
network,
chain_id,
tx_count,
header,
ingested_at AS _ingested_at,
_inserted_timestamp
FROM
{{ source(
'prod',
'flow_blocks'
) }}
WHERE
_inserted_timestamp :: DATE >= '2022-05-01'

View File

@ -1,24 +0,0 @@
{{ config (
materialized = 'view',
tags = ['scheduled']
) }}
SELECT
record_id,
tx_id,
tx_block_index,
offset_id,
block_id,
block_timestamp,
network,
chain_id,
tx,
ingested_at AS _ingested_at,
_inserted_timestamp
FROM
{{ source(
'prod',
'flow_txs'
) }}
WHERE
_inserted_timestamp :: DATE >= '2022-05-01'

View File

@ -0,0 +1,8 @@
{# Bronze view over Streamline "evm_testnet_blocks" external-table output.
   FR variant — presumably the full-refresh/backfill read path; confirm
   against the streamline_external_table_FR_query_v2 macro. #}
{{ config (
materialized = 'view'
) }}
{# Partition key is parsed from the third '/'-segment of the file name. #}
{{ streamline_external_table_FR_query_v2(
model = "evm_testnet_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )"
) }}

View File

@ -0,0 +1,8 @@
{# Bronze view over Streamline "evm_testnet_receipts" external-table output.
   FR variant — presumably the full-refresh/backfill read path; confirm
   against the streamline_external_table_FR_query_v2 macro. #}
{{ config (
materialized = 'view'
) }}
{# Partition key is parsed from the third '/'-segment of the file name. #}
{{ streamline_external_table_FR_query_v2(
model = "evm_testnet_receipts",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )"
) }}

View File

@ -0,0 +1,9 @@
{# Bronze view over Streamline "evm_testnet_traces" external-table output.
   FR variant — presumably the full-refresh/backfill read path; confirm
   against the streamline_external_table_FR_query_v2 macro. #}
{{ config (
materialized = 'view',
tags = ['traces']
) }}
{# Partition key is parsed from the third '/'-segment of the file name. #}
{{ streamline_external_table_FR_query_v2(
model = "evm_testnet_traces",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )"
) }}

View File

@ -0,0 +1,8 @@
{# Bronze view over Streamline "evm_testnet_blocks" external-table output
   (incremental read path, per streamline_external_table_query_v2). #}
{{ config (
materialized = 'view'
) }}
{# Partition key is parsed from the third '/'-segment of the file name. #}
{{ streamline_external_table_query_v2(
model = "evm_testnet_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )"
) }}

View File

@ -0,0 +1,8 @@
{# Bronze view over Streamline "evm_testnet_receipts" external-table output
   (incremental read path, per streamline_external_table_query_v2). #}
{{ config (
materialized = 'view'
) }}
{# Partition key is parsed from the third '/'-segment of the file name. #}
{{ streamline_external_table_query_v2(
model = "evm_testnet_receipts",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )"
) }}

View File

@ -0,0 +1,9 @@
{# Bronze view over Streamline "evm_testnet_traces" external-table output
   (incremental read path, per streamline_external_table_query_v2). #}
{{ config (
materialized = 'view',
tags = ['traces']
) }}
{# Partition key is parsed from the third '/'-segment of the file name. #}
{{ streamline_external_table_query_v2(
model = "evm_testnet_traces",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )"
) }}

View File

@ -0,0 +1,64 @@
{# Gold fact table of EVM testnet block headers, decoded from the raw
   block_json in the silver layer. Incremental delete+insert on
   block_number; picks up only rows whose modified_timestamp is newer
   than the max already loaded. #}
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['evm_testnet']
) }}
SELECT
block_number,
block_json :hash :: STRING AS block_hash,
-- NOTE(review): hex timestamp is converted to an integer string and then
-- cast straight to TIMESTAMP — relies on Snowflake's epoch-seconds
-- coercion for numeric strings; confirm the produced values are correct.
utils.udf_hex_to_int(
block_json :timestamp :: STRING
) :: TIMESTAMP AS block_timestamp,
'testnet' AS network,
ARRAY_SIZE(
block_json :transactions
) AS tx_count,
utils.udf_hex_to_int(
block_json :size :: STRING
) :: bigint AS SIZE,
block_json :miner :: STRING AS miner,
block_json :mixHash :: STRING AS mix_hash,
block_json :extraData :: STRING AS extra_data,
block_json :parentHash :: STRING AS parent_hash,
utils.udf_hex_to_int(
block_json :gasUsed :: STRING
) :: bigint AS gas_used,
utils.udf_hex_to_int(
block_json :gasLimit :: STRING
) :: bigint AS gas_limit,
utils.udf_hex_to_int(
block_json :baseFeePerGas :: STRING
) :: bigint AS base_fee_per_gas,
utils.udf_hex_to_int(
block_json :difficulty :: STRING
) :: bigint AS difficulty,
utils.udf_hex_to_int(
block_json :totalDifficulty :: STRING
) :: bigint AS total_difficulty,
block_json :sha3Uncles :: STRING AS sha3_uncles,
block_json :uncles AS uncle_blocks,
utils.udf_hex_to_int(
block_json :nonce :: STRING
) :: bigint AS nonce,
block_json :receiptsRoot :: STRING AS receipts_root,
block_json :stateRoot :: STRING AS state_root,
block_json :transactionsRoot :: STRING AS transactions_root,
block_json :logsBloom :: STRING AS logs_bloom,
-- Surrogate primary key; one row per block_number.
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS fact_blocks_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
{{ ref('silver_evm__testnet_blocks') }}
WHERE 1=1
{% if is_incremental() %}
-- Only rows changed since the latest row already in this table.
AND modified_timestamp > (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp
FROM
{{ this }}
)
{% endif %}

View File

@ -0,0 +1,56 @@
---
# dbt schema definitions for testnet__fact_evm_blocks; all column
# descriptions reference shared doc blocks.
version: 2

models:
  - name: testnet__fact_evm_blocks
    description: '{{ doc("evm_blocks_table_doc") }}'
    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_HASH
        description: '{{ doc("evm_blocks_hash") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("evm_block_timestamp") }}'
      - name: NETWORK
        description: '{{ doc("evm_network") }}'
      - name: TX_COUNT
        description: '{{ doc("evm_tx_count") }}'
      - name: SIZE
        description: '{{ doc("evm_size") }}'
      - name: MINER
        description: '{{ doc("evm_miner") }}'
      - name: BASE_FEE_PER_GAS
        description: '{{ doc("evm_base_fee_per_gas") }}'
      - name: MIX_HASH
        description: '{{ doc("evm_mix_hash") }}'
      - name: EXTRA_DATA
        description: '{{ doc("evm_extra_data") }}'
      - name: PARENT_HASH
        description: '{{ doc("evm_parent_hash") }}'
      - name: GAS_USED
        description: '{{ doc("evm_gas_used") }}'
      - name: GAS_LIMIT
        description: '{{ doc("evm_gas_limit") }}'
      - name: DIFFICULTY
        description: '{{ doc("evm_difficulty") }}'
      - name: TOTAL_DIFFICULTY
        description: '{{ doc("evm_total_difficulty") }}'
      - name: SHA3_UNCLES
        description: '{{ doc("evm_sha3_uncles") }}'
      - name: UNCLE_BLOCKS
        description: '{{ doc("evm_uncle_blocks") }}'
      - name: NONCE
        description: '{{ doc("evm_blocks_nonce") }}'
      - name: RECEIPTS_ROOT
        description: '{{ doc("evm_receipts_root") }}'
      - name: STATE_ROOT
        description: '{{ doc("evm_state_root") }}'
      - name: TRANSACTIONS_ROOT
        description: '{{ doc("evm_transactions_root") }}'
      - name: LOGS_BLOOM
        description: '{{ doc("evm_logs_bloom") }}'
      - name: FACT_BLOCKS_ID
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("evm_modified_timestamp") }}'

View File

@ -0,0 +1,223 @@
{# Gold fact table of EVM testnet event logs, flattened from the logs
   array inside each receipt. Incremental delete+insert on block_number,
   with a "heal" pass that backfills rows whose block_timestamp or
   origin_function_signature was NULL on a previous run. #}
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['evm_testnet']
) }}
-- base: new/changed receipts that contain at least one log.
WITH base AS (
SELECT
block_number,
-- NOTE(review): uses_receipts_by_hash is not set anywhere in this model
-- (no var()/config); dbt's Jinja renders an undefined name as falsy, so
-- the ELSE branch below is always taken — confirm this is intentional.
{% if uses_receipts_by_hash %}
tx_hash,
{% else %}
receipts_json :transactionHash :: STRING AS tx_hash,
{% endif %}
receipts_json,
receipts_json :logs AS full_logs
FROM
{{ ref('silver_evm__testnet_receipts') }}
WHERE
1 = 1
AND ARRAY_SIZE(receipts_json :logs) > 0
{% if is_incremental() %}
AND modified_timestamp > (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp
FROM
{{ this }})
{% endif %}
),
-- flattened_logs: one row per log entry (LATERAL FLATTEN over the array).
flattened_logs AS (
SELECT
block_number,
tx_hash,
lower(receipts_json :from :: STRING) AS origin_from_address,
lower(receipts_json :to :: STRING) AS origin_to_address,
-- Receipt status may appear hex ('0x1'/'0x0') or decimal ('1'/'0').
CASE
WHEN receipts_json :status :: STRING = '0x1' THEN TRUE
WHEN receipts_json :status :: STRING = '1' THEN TRUE
WHEN receipts_json :status :: STRING = '0x0' THEN FALSE
WHEN receipts_json :status :: STRING = '0' THEN FALSE
ELSE NULL
END AS tx_succeeded,
VALUE :address :: STRING AS contract_address,
VALUE :blockHash :: STRING AS block_hash,
VALUE :blockNumber :: STRING AS block_number_hex,
VALUE :data :: STRING AS DATA,
utils.udf_hex_to_int(
VALUE :logIndex :: STRING
) :: INT AS event_index,
VALUE :removed :: BOOLEAN AS event_removed,
VALUE :topics AS topics,
VALUE :transactionHash :: STRING AS transaction_hash,
utils.udf_hex_to_int(
VALUE :transactionIndex :: STRING
) :: INT AS transaction_index
FROM
base,
LATERAL FLATTEN (
input => full_logs
)
),
-- new_logs: enrich flattened logs with block timestamp and transaction
-- origin fields from the sibling gold fact tables.
new_logs AS (
SELECT
l.block_number,
b.block_timestamp,
l.tx_hash,
l.transaction_index AS tx_position,
l.event_index,
l.contract_address,
l.topics,
l.topics [0] :: STRING AS topic_0,
l.topics [1] :: STRING AS topic_1,
l.topics [2] :: STRING AS topic_2,
l.topics [3] :: STRING AS topic_3,
l.data,
l.event_removed,
txs.from_address AS origin_from_address,
txs.to_address AS origin_to_address,
txs.origin_function_signature,
l.tx_succeeded
FROM
flattened_logs l
LEFT JOIN {{ ref('testnet__fact_evm_blocks') }}
b
ON l.block_number = b.block_number
{% if is_incremental() %}
-- Join pruning: only blocks modified within roughly the last day.
AND b.modified_timestamp >= (
SELECT
MAX(modified_timestamp) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
LEFT JOIN {{ ref('testnet__fact_evm_transactions') }}
txs
ON l.tx_hash = txs.tx_hash
AND l.block_number = txs.block_number
{% if is_incremental() %}
AND txs.modified_timestamp >= (
SELECT
MAX(modified_timestamp) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
)
{% if is_incremental() %},
-- missing_data: existing rows whose enrichment columns came out NULL;
-- re-join them to blocks/transactions to heal them.
missing_data AS (
SELECT
t.block_number,
b.block_timestamp AS block_timestamp_heal,
t.tx_hash,
t.tx_position,
t.event_index,
t.contract_address,
t.topics,
t.topic_0,
t.topic_1,
t.topic_2,
t.topic_3,
t.data,
t.event_removed,
txs.from_address AS origin_from_address_heal,
txs.to_address AS origin_to_address_heal,
txs.origin_function_signature AS origin_function_signature_heal,
t.tx_succeeded
FROM
{{ this }}
t
LEFT JOIN {{ ref('testnet__fact_evm_transactions') }}
txs
ON t.tx_hash = txs.tx_hash
AND t.block_number = txs.block_number
LEFT JOIN {{ ref('testnet__fact_evm_blocks') }}
b
ON t.block_number = b.block_number
WHERE
t.block_timestamp IS NULL
OR t.origin_function_signature IS NULL
)
{% endif %},
-- all_logs: new rows plus (incrementally) the healed rows.
all_logs AS (
SELECT
block_number,
block_timestamp,
tx_hash,
tx_position,
event_index,
contract_address,
topics,
topic_0,
topic_1,
topic_2,
topic_3,
DATA,
event_removed,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_succeeded
FROM
new_logs
{% if is_incremental() %}
UNION ALL
SELECT
block_number,
block_timestamp_heal AS block_timestamp,
tx_hash,
tx_position,
event_index,
contract_address,
topics,
topic_0,
topic_1,
topic_2,
topic_3,
DATA,
event_removed,
origin_from_address_heal AS origin_from_address,
origin_to_address_heal AS origin_to_address,
origin_function_signature_heal AS origin_function_signature,
tx_succeeded
FROM
missing_data
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
tx_position,
event_index,
contract_address,
topics,
topic_0,
topic_1,
topic_2,
topic_3,
DATA,
event_removed,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_succeeded,
-- Surrogate primary key; dedup below keeps one row per (tx_hash, event_index).
{{ dbt_utils.generate_surrogate_key(['tx_hash','event_index']) }} AS fact_event_logs_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
all_logs qualify ROW_NUMBER() over (
PARTITION BY fact_event_logs_id
ORDER BY
block_number DESC,
block_timestamp DESC nulls last,
origin_function_signature DESC nulls last
) = 1

View File

@ -0,0 +1,46 @@
---
# dbt schema definitions for testnet__fact_evm_event_logs; all column
# descriptions reference shared doc blocks.
version: 2

models:
  - name: testnet__fact_evm_event_logs
    description: '{{ doc("evm_logs_table_doc") }}'
    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("evm_block_timestamp") }}'
      - name: TX_HASH
        description: '{{ doc("evm_tx_hash") }}'
      - name: TX_POSITION
        description: '{{ doc("evm_tx_position") }}'
      - name: EVENT_INDEX
        description: '{{ doc("evm_event_index") }}'
      - name: CONTRACT_ADDRESS
        description: '{{ doc("evm_logs_contract_address") }}'
      - name: TOPICS
        description: '{{ doc("evm_topics") }}'
      - name: TOPIC_0
        description: '{{ doc("evm_topic_0") }}'
      - name: TOPIC_1
        description: '{{ doc("evm_topic_1") }}'
      - name: TOPIC_2
        description: '{{ doc("evm_topic_2") }}'
      - name: TOPIC_3
        description: '{{ doc("evm_topic_3") }}'
      - name: DATA
        description: '{{ doc("evm_logs_data") }}'
      - name: EVENT_REMOVED
        description: '{{ doc("evm_event_removed") }}'
      - name: ORIGIN_FROM_ADDRESS
        description: '{{ doc("evm_from_address") }}'
      - name: ORIGIN_TO_ADDRESS
        description: '{{ doc("evm_to_address") }}'
      - name: ORIGIN_FUNCTION_SIGNATURE
        description: '{{ doc("evm_origin_sig") }}'
      - name: TX_SUCCEEDED
        description: '{{ doc("evm_tx_succeeded") }}'
      - name: FACT_EVENT_LOGS_ID
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("evm_modified_timestamp") }}'

View File

@ -0,0 +1,379 @@
{# Gold fact table of EVM testnet traces. Rebuilds the trace tree from the
   silver layer: computes sub-trace counts, a deterministic trace_index,
   error propagation from parent traces, and decoded value/gas fields.
   Incremental delete+insert on block_number with a heal pass. #}
{{ config(
materialized = "incremental",
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['evm_testnet']
) }}
-- silver_traces: new/changed raw traces from silver.
WITH silver_traces AS (
SELECT
block_number,
tx_position,
trace_address,
parent_trace_address,
trace_address_array,
trace_json,
traces_id,
'regular' AS source
FROM {{ ref('silver_evm__testnet_traces') }}
WHERE 1 = 1
{% if is_incremental() %}
AND modified_timestamp > (
SELECT COALESCE(MAX(modified_timestamp), '1970-01-01'::TIMESTAMP) AS modified_timestamp
FROM {{ this }}
)
{% endif %}
),
-- sub_traces: number of direct children per parent trace.
sub_traces AS (
SELECT
block_number,
tx_position,
parent_trace_address,
COUNT(*) AS sub_traces
FROM silver_traces
GROUP BY
block_number,
tx_position,
parent_trace_address
),
-- trace_index_array: numeric sort key per trace ('ORIGIN' maps to -1 so
-- the root sorts before its children).
trace_index_array AS (
SELECT
block_number,
tx_position,
trace_address,
ARRAY_AGG(flat_value) AS number_array
FROM (
SELECT
block_number,
tx_position,
trace_address,
IFF(VALUE::STRING = 'ORIGIN', -1, VALUE::INT) AS flat_value
FROM silver_traces,
LATERAL FLATTEN(input => trace_address_array)
)
GROUP BY
block_number,
tx_position,
trace_address
),
-- trace_index_sub_traces: join child counts + sort key, assign the
-- 0-based trace_index within each transaction.
trace_index_sub_traces AS (
SELECT
b.block_number,
b.tx_position,
b.trace_address,
IFNULL(sub_traces, 0) AS sub_traces,
number_array,
ROW_NUMBER() OVER (
PARTITION BY b.block_number, b.tx_position
ORDER BY number_array ASC
) - 1 AS trace_index,
b.trace_json,
b.traces_id,
b.source
FROM silver_traces b
LEFT JOIN sub_traces s
ON b.block_number = s.block_number
AND b.tx_position = s.tx_position
AND b.trace_address = s.parent_trace_address
JOIN trace_index_array n
ON b.block_number = n.block_number
AND b.tx_position = n.tx_position
AND b.trace_address = n.trace_address
),
-- errored_traces: traces that carry an error of their own.
errored_traces AS (
SELECT
block_number,
tx_position,
trace_address,
trace_json
FROM trace_index_sub_traces
WHERE trace_json:error::STRING IS NOT NULL
),
-- error_logic: for every trace, pick up its own error, any errored
-- ancestor (prefix match on trace_address), and the root ('ORIGIN') error.
error_logic AS (
SELECT
b0.block_number,
b0.tx_position,
b0.trace_address,
b0.trace_json:error::STRING AS error,
b1.trace_json:error::STRING AS any_error,
b2.trace_json:error::STRING AS origin_error
FROM trace_index_sub_traces b0
LEFT JOIN errored_traces b1
ON b0.block_number = b1.block_number
AND b0.tx_position = b1.tx_position
AND b0.trace_address RLIKE CONCAT('^', b1.trace_address, '(_[0-9]+)*$')
LEFT JOIN errored_traces b2
ON b0.block_number = b2.block_number
AND b0.tx_position = b2.tx_position
AND b2.trace_address = 'ORIGIN'
),
-- aggregated_errors: a trace succeeded only if neither it, any ancestor,
-- nor the root errored.
-- NOTE(review): origin_error appears in GROUP BY but not in the SELECT
-- list; at most one 'ORIGIN' row exists per transaction so this should
-- not duplicate rows — confirm.
aggregated_errors AS (
SELECT
block_number,
tx_position,
trace_address,
error,
IFF(
MAX(any_error) IS NULL AND error IS NULL AND origin_error IS NULL,
TRUE,
FALSE
) AS trace_succeeded
FROM error_logic
GROUP BY
block_number,
tx_position,
trace_address,
error,
origin_error
),
-- json_traces: decode the remaining trace_json fields (addresses, value,
-- gas, input/output, type).
json_traces AS (
SELECT
block_number,
tx_position,
trace_address,
sub_traces,
number_array,
trace_index,
trace_succeeded,
trace_json:error::STRING AS error_reason,
trace_json:revertReason::STRING AS revert_reason,
lower(trace_json:from::STRING) AS from_address,
lower(trace_json:to::STRING) AS to_address,
IFNULL(trace_json:value::STRING, '0x0') AS value_hex,
IFNULL(utils.udf_hex_to_int(trace_json:value::STRING), '0') AS value_precise_raw,
utils.udf_decimal_adjust(value_precise_raw, 18) AS value_precise,
value_precise::FLOAT AS value,
utils.udf_hex_to_int(trace_json:gas::STRING)::INT AS gas,
utils.udf_hex_to_int(trace_json:gasUsed::STRING)::INT AS gas_used,
trace_json:input::STRING AS input,
trace_json:output::STRING AS output,
trace_json:type::STRING AS type,
traces_id
FROM trace_index_sub_traces
JOIN aggregated_errors USING (
block_number,
tx_position,
trace_address
)
),
-- incremental_traces: attach transaction-level context (hash, timestamp,
-- origin addresses, tx_succeeded) from the transactions fact table.
incremental_traces AS (
SELECT
f.block_number,
t.tx_hash,
t.block_timestamp,
t.origin_function_signature,
t.from_address AS origin_from_address,
t.to_address AS origin_to_address,
f.tx_position,
f.trace_index,
f.from_address AS from_address,
f.to_address AS to_address,
f.value_hex,
f.value_precise_raw,
f.value_precise,
f.value,
f.gas,
f.gas_used,
f.input,
f.output,
f.type,
f.sub_traces,
f.error_reason,
f.revert_reason,
f.traces_id,
f.trace_succeeded,
f.trace_address,
t.tx_succeeded
FROM json_traces f
LEFT OUTER JOIN {{ ref('testnet__fact_evm_transactions') }} t
ON f.tx_position = t.tx_position
AND f.block_number = t.block_number
{% if is_incremental() %}
AND t.modified_timestamp >= (
SELECT DATEADD('hour', -24, MAX(modified_timestamp))
FROM {{ this }}
)
{% endif %}
)
{% if is_incremental() %},
-- NOTE(review): silver_traces hard-codes source = 'regular' above, so
-- this filter can never match and overflow_blocks is always empty as
-- written — likely vestigial from a variant that ingests overflow rows;
-- confirm and remove or wire up the overflow source.
overflow_blocks AS (
SELECT DISTINCT block_number
FROM silver_traces
WHERE source = 'overflow'
),
-- heal_missing_data: re-enrich existing rows that are missing tx context.
heal_missing_data AS (
SELECT
t.block_number,
txs.tx_hash,
txs.block_timestamp AS block_timestamp_heal,
txs.origin_function_signature AS origin_function_signature_heal,
txs.from_address AS origin_from_address_heal,
txs.to_address AS origin_to_address_heal,
t.tx_position,
t.trace_index,
t.from_address,
t.to_address,
t.value_hex,
t.value_precise_raw,
t.value_precise,
t.value,
t.gas,
t.gas_used,
t.input,
t.output,
t.type,
t.sub_traces,
t.error_reason,
t.revert_reason,
t.fact_traces_id AS traces_id,
t.trace_succeeded,
t.trace_address,
txs.tx_succeeded AS tx_succeeded_heal
FROM {{ this }} t
JOIN {{ ref('testnet__fact_evm_transactions') }} txs
ON t.tx_position = txs.tx_position
AND t.block_number = txs.block_number
WHERE t.tx_position IS NULL
OR t.block_timestamp IS NULL
OR t.tx_succeeded IS NULL
)
{% endif %},
-- all_traces: new rows, healed rows, and rows from overflow blocks.
all_traces AS (
SELECT
block_number,
tx_hash,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_position,
trace_index,
from_address,
to_address,
value_hex,
value_precise_raw,
value_precise,
value,
gas,
gas_used,
input,
output,
type,
sub_traces,
error_reason,
revert_reason,
trace_succeeded,
trace_address,
tx_succeeded
FROM incremental_traces
{% if is_incremental() %}
UNION ALL
SELECT
block_number,
tx_hash,
block_timestamp_heal AS block_timestamp,
origin_function_signature_heal AS origin_function_signature,
origin_from_address_heal AS origin_from_address,
origin_to_address_heal AS origin_to_address,
tx_position,
trace_index,
from_address,
to_address,
value_hex,
value_precise_raw,
value_precise,
value,
gas,
gas_used,
input,
output,
type,
sub_traces,
error_reason,
revert_reason,
trace_succeeded,
trace_address,
tx_succeeded
FROM heal_missing_data
UNION ALL
SELECT
block_number,
tx_hash,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_position,
trace_index,
from_address,
to_address,
value_hex,
value_precise_raw,
value_precise,
value,
gas,
gas_used,
input,
output,
type,
sub_traces,
error_reason,
revert_reason,
trace_succeeded,
trace_address,
tx_succeeded
FROM {{ this }}
JOIN overflow_blocks USING (block_number)
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
tx_position,
trace_index,
from_address,
to_address,
input,
output,
type,
trace_address,
sub_traces,
value,
value_precise_raw,
value_precise,
value_hex,
gas,
gas_used,
origin_from_address,
origin_to_address,
origin_function_signature,
trace_succeeded,
error_reason,
revert_reason,
tx_succeeded,
-- Surrogate primary key; dedup keeps the freshest row per trace.
{{ dbt_utils.generate_surrogate_key(['tx_hash', 'trace_index']) }} AS fact_traces_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM all_traces
QUALIFY (ROW_NUMBER() OVER (
PARTITION BY block_number, tx_position, trace_index
ORDER BY modified_timestamp DESC, block_timestamp DESC NULLS LAST
)) = 1

View File

@ -0,0 +1,62 @@
---
# dbt schema definitions for testnet__fact_evm_traces; all column
# descriptions reference shared doc blocks.
version: 2

models:
  - name: testnet__fact_evm_traces
    description: '{{ doc("evm_traces_table_doc") }}'
    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("evm_block_timestamp") }}'
      - name: TX_HASH
        description: '{{ doc("evm_tx_hash") }}'
      - name: TX_POSITION
        description: '{{ doc("evm_tx_position") }}'
      - name: TRACE_INDEX
        description: '{{ doc("evm_trace_index") }}'
      - name: FROM_ADDRESS
        description: '{{ doc("evm_from_address") }}'
      - name: TO_ADDRESS
        description: '{{ doc("evm_to_address") }}'
      - name: INPUT
        description: '{{ doc("evm_traces_input") }}'
      - name: OUTPUT
        description: '{{ doc("evm_traces_output") }}'
      - name: TYPE
        description: '{{ doc("evm_traces_type") }}'
      - name: TRACE_ADDRESS
        description: '{{ doc("evm_trace_address") }}'
      - name: SUB_TRACES
        description: '{{ doc("evm_sub_traces") }}'
      - name: VALUE
        description: '{{ doc("evm_value") }}'
      - name: VALUE_PRECISE_RAW
        description: '{{ doc("evm_precise_amount_unadjusted") }}'
      - name: VALUE_PRECISE
        description: '{{ doc("evm_precise_amount_adjusted") }}'
      - name: VALUE_HEX
        description: '{{ doc("evm_value_hex") }}'
      - name: GAS
        description: '{{ doc("evm_traces_gas") }}'
      - name: GAS_USED
        description: '{{ doc("evm_traces_gas_used") }}'
      - name: ORIGIN_FROM_ADDRESS
        description: '{{ doc("evm_traces_from") }}'
      - name: ORIGIN_TO_ADDRESS
        description: '{{ doc("evm_traces_to") }}'
      - name: ORIGIN_FUNCTION_SIGNATURE
        description: '{{ doc("evm_origin_sig") }}'
      - name: TRACE_SUCCEEDED
        description: '{{ doc("evm_trace_succeeded") }}'
      - name: ERROR_REASON
        description: '{{ doc("evm_trace_error_reason") }}'
      - name: REVERT_REASON
        description: '{{ doc("evm_revert_reason") }}'
      - name: TX_SUCCEEDED
        description: '{{ doc("evm_tx_succeeded") }}'
      - name: FACT_TRACES_ID
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("evm_modified_timestamp") }}'

View File

@ -0,0 +1,348 @@
{# Gold fact table of EVM testnet transactions, decoded from raw
   transaction_json and enriched with receipt fields (fees, gas, status)
   and the block timestamp. Incremental delete+insert on block_number,
   with a heal pass for rows missing block_timestamp or tx_succeeded.
   Fixes in this revision:
   - removed the trailing comma before FROM in transactions_fields (relied
     on Snowflake's nonstandard trailing-comma tolerance);
   - heal-path tx_succeeded CASE now also accepts decimal '1'/'0' status
     values, matching the main path. #}
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['evm_testnet']
) }}
-- base: new/changed raw transactions from silver.
WITH base AS (
SELECT
block_number,
tx_position,
transaction_json
FROM
{{ ref('silver_evm__testnet_transactions') }}
{% if is_incremental() %}
WHERE
modified_timestamp > (
SELECT
COALESCE(MAX(modified_timestamp), '1970-01-01' :: TIMESTAMP) AS modified_timestamp
FROM
{{ this }})
{% endif %}
),
-- transactions_fields: decode every field of transaction_json. gas_price
-- here is the raw wei value; it is converted to gwei on output below.
transactions_fields AS (
SELECT
block_number,
tx_position,
transaction_json :blockHash :: STRING AS block_hash,
transaction_json :blockNumber :: STRING AS block_number_hex,
lower(transaction_json :from :: STRING) AS from_address,
utils.udf_hex_to_int(
transaction_json :gas :: STRING
) :: bigint AS gas_limit,
utils.udf_hex_to_int(
transaction_json :gasPrice :: STRING
) :: bigint AS gas_price,
transaction_json :hash :: STRING AS tx_hash,
transaction_json :input :: STRING AS input_data,
-- First 4 bytes of calldata ('0x' + 8 hex chars).
LEFT(
input_data,
10
) AS origin_function_signature,
utils.udf_hex_to_int(
transaction_json :nonce :: STRING
) :: bigint AS nonce,
transaction_json :r :: STRING AS r,
transaction_json :s :: STRING AS s,
lower(transaction_json :to :: STRING) AS to_address1,
-- Contract deployments have an empty "to"; normalize to NULL.
CASE
WHEN to_address1 = '' THEN NULL
ELSE to_address1
END AS to_address,
utils.udf_hex_to_int(
transaction_json :transactionIndex :: STRING
) :: bigint AS transaction_index,
utils.udf_hex_to_int(
transaction_json :type :: STRING
) :: bigint AS tx_type,
utils.udf_hex_to_int(
transaction_json :v :: STRING
) :: bigint AS v,
-- EIP-1559 fee caps, converted from wei to gwei.
TRY_TO_NUMBER(
utils.udf_hex_to_int(
transaction_json :maxFeePerGas :: STRING
)
) / pow(
10,
9
) AS max_fee_per_gas,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
transaction_json :maxPriorityFeePerGas :: STRING
)
) / pow(
10,
9
) AS max_priority_fee_per_gas,
utils.udf_hex_to_int(
transaction_json :value :: STRING
) AS value_precise_raw,
utils.udf_decimal_adjust(
value_precise_raw,
18
) AS value_precise,
value_precise :: FLOAT AS VALUE,
utils.udf_hex_to_int(transaction_json :yParity :: STRING):: bigint AS y_parity,
transaction_json :accessList AS access_list
FROM
base
),
-- new_transactions: join receipts (gas used, status) and the block
-- timestamp from the blocks fact table.
new_transactions AS (
SELECT
txs.block_number,
txs.block_hash,
b.block_timestamp,
txs.tx_hash,
txs.from_address,
txs.to_address,
txs.origin_function_signature,
txs.value,
txs.value_precise_raw,
txs.value_precise,
txs.max_fee_per_gas,
txs.max_priority_fee_per_gas,
txs.y_parity,
txs.access_list,
-- Fee = raw wei gas_price * gasUsed, decimal-adjusted by 18 to native units.
utils.udf_decimal_adjust(
txs.gas_price * utils.udf_hex_to_int(
r.receipts_json :gasUsed :: STRING
) :: bigint,
18
) AS tx_fee_precise,
COALESCE(
tx_fee_precise :: FLOAT,
0
) AS tx_fee,
-- Receipt status may appear hex ('0x1'/'0x0') or decimal ('1'/'0').
CASE
WHEN r.receipts_json :status :: STRING = '0x1' THEN TRUE
WHEN r.receipts_json :status :: STRING = '1' THEN TRUE
WHEN r.receipts_json :status :: STRING = '0x0' THEN FALSE
WHEN r.receipts_json :status :: STRING = '0' THEN FALSE
ELSE NULL
END AS tx_succeeded,
txs.tx_type,
txs.nonce,
txs.tx_position,
txs.input_data,
-- Published gas_price is in gwei (wei / 1e9).
txs.gas_price / pow(
10,
9
) AS gas_price,
utils.udf_hex_to_int(
r.receipts_json :gasUsed :: STRING
) :: bigint AS gas_used,
txs.gas_limit,
utils.udf_hex_to_int(
r.receipts_json :cumulativeGasUsed :: STRING
) :: bigint AS cumulative_gas_used,
utils.udf_hex_to_int(
r.receipts_json :effectiveGasPrice :: STRING
) :: bigint AS effective_gas_price,
txs.r,
txs.s,
txs.v
FROM
transactions_fields txs
LEFT JOIN {{ ref('testnet__fact_evm_blocks') }}
b
ON txs.block_number = b.block_number
{% if is_incremental() %}
AND b.modified_timestamp >= (
SELECT
MAX(modified_timestamp) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
LEFT JOIN {{ ref('silver_evm__testnet_receipts') }}
r
ON txs.block_number = r.block_number
AND txs.tx_hash = r.receipts_json :transactionHash :: STRING
{% if is_incremental() %}
AND r.modified_timestamp >= (
SELECT
MAX(modified_timestamp) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
)
{% if is_incremental() %},
-- missing_data: existing rows lacking block_timestamp or tx_succeeded;
-- re-join blocks and receipts to heal them.
missing_data AS (
SELECT
t.block_number,
b.block_timestamp AS block_timestamp_heal,
t.tx_hash,
t.from_address,
t.to_address,
t.origin_function_signature,
t.value,
t.value_precise_raw,
t.value_precise,
t.max_fee_per_gas,
t.max_priority_fee_per_gas,
t.y_parity,
t.access_list,
-- Here t.gas_price comes from {{ this }} and is already in gwei, so the
-- decimal adjustment is 9 (not 18 as in new_transactions, where
-- gas_price is raw wei).
utils.udf_decimal_adjust(
t.gas_price * utils.udf_hex_to_int(
r.receipts_json :gasUsed :: STRING
) :: bigint,
9
) AS tx_fee_precise_heal,
COALESCE(
tx_fee_precise_heal :: FLOAT,
0
) AS tx_fee_heal,
-- Same status decoding as the main path (hex or decimal).
CASE
WHEN r.receipts_json :status :: STRING = '0x1' THEN TRUE
WHEN r.receipts_json :status :: STRING = '1' THEN TRUE
WHEN r.receipts_json :status :: STRING = '0x0' THEN FALSE
WHEN r.receipts_json :status :: STRING = '0' THEN FALSE
ELSE NULL
END AS tx_succeeded_heal,
t.tx_type,
t.nonce,
t.tx_position,
t.input_data,
t.gas_price,
utils.udf_hex_to_int(
r.receipts_json :gasUsed :: STRING
) :: bigint AS gas_used_heal,
t.gas_limit,
utils.udf_hex_to_int(
r.receipts_json :cumulativeGasUsed :: STRING
) :: bigint AS cumulative_gas_used_heal,
utils.udf_hex_to_int(
r.receipts_json :effectiveGasPrice :: STRING
) :: bigint AS effective_gas_price_heal,
t.r,
t.s,
t.v
FROM
{{ this }}
t
LEFT JOIN {{ ref('testnet__fact_evm_blocks') }}
b
ON t.block_number = b.block_number
LEFT JOIN {{ ref('silver_evm__testnet_receipts') }}
r
ON t.block_number = r.block_number
AND t.tx_hash = r.receipts_json :transactionHash :: STRING
WHERE
t.block_timestamp IS NULL
OR t.tx_succeeded IS NULL
)
{% endif %},
-- all_transactions: new rows plus (incrementally) the healed rows.
all_transactions AS (
SELECT
block_number,
block_timestamp,
tx_hash,
from_address,
to_address,
origin_function_signature,
VALUE,
value_precise_raw,
value_precise,
max_fee_per_gas,
max_priority_fee_per_gas,
y_parity,
access_list,
tx_fee,
tx_fee_precise,
tx_succeeded,
tx_type,
nonce,
tx_position,
input_data,
gas_price,
gas_used,
gas_limit,
cumulative_gas_used,
effective_gas_price,
r,
s,
v
FROM
new_transactions
{% if is_incremental() %}
UNION ALL
SELECT
block_number,
block_timestamp_heal AS block_timestamp,
tx_hash,
from_address,
to_address,
origin_function_signature,
VALUE,
value_precise_raw,
value_precise,
max_fee_per_gas,
max_priority_fee_per_gas,
y_parity,
access_list,
tx_fee_heal AS tx_fee,
tx_fee_precise_heal AS tx_fee_precise,
tx_succeeded_heal AS tx_succeeded,
tx_type,
nonce,
tx_position,
input_data,
gas_price,
gas_used_heal AS gas_used,
gas_limit,
cumulative_gas_used_heal AS cumulative_gas_used,
effective_gas_price_heal AS effective_gas_price,
r,
s,
v
FROM
missing_data
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
from_address,
to_address,
origin_function_signature,
VALUE,
value_precise_raw,
value_precise,
tx_fee,
tx_fee_precise,
tx_succeeded,
tx_type,
nonce,
tx_position,
input_data,
gas_price,
gas_used,
gas_limit,
cumulative_gas_used,
effective_gas_price,
max_fee_per_gas,
max_priority_fee_per_gas,
y_parity,
access_list,
r,
s,
v,
-- Surrogate primary key; dedup below keeps one row per tx_hash.
{{ dbt_utils.generate_surrogate_key(['tx_hash']) }} AS fact_transactions_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
all_transactions qualify ROW_NUMBER() over (
PARTITION BY fact_transactions_id
ORDER BY
block_number DESC,
block_timestamp DESC nulls last,
tx_succeeded DESC nulls last
) = 1

View File

@ -0,0 +1,68 @@
# dbt schema file: column-level documentation for the gold testnet EVM
# transactions model. All descriptions resolve shared markdown doc blocks
# via {{ doc(...) }}.
version: 2

models:
  - name: testnet__fact_evm_transactions
    description: '{{ doc("evm_tx_table_doc") }}'
    columns:
      - name: BLOCK_NUMBER
        description: '{{ doc("evm_block_number") }}'
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("evm_block_timestamp") }}'
      - name: TX_HASH
        description: '{{ doc("evm_tx_hash") }}'
      - name: FROM_ADDRESS
        description: '{{ doc("evm_from_address") }}'
      - name: TO_ADDRESS
        description: '{{ doc("evm_to_address") }}'
      - name: ORIGIN_FUNCTION_SIGNATURE
        description: '{{ doc("evm_tx_origin_sig") }}'
      - name: VALUE
        description: '{{ doc("evm_value") }}'
      - name: VALUE_PRECISE_RAW
        description: '{{ doc("evm_precise_amount_unadjusted") }}'
      - name: VALUE_PRECISE
        description: '{{ doc("evm_precise_amount_adjusted") }}'
      - name: TX_FEE
        description: '{{ doc("evm_tx_fee") }}'
      - name: TX_FEE_PRECISE
        description: '{{ doc("evm_tx_fee_precise") }}'
      - name: TX_SUCCEEDED
        description: '{{ doc("evm_tx_succeeded") }}'
      - name: TX_TYPE
        description: '{{ doc("evm_tx_type") }}'
      - name: NONCE
        description: '{{ doc("evm_tx_nonce") }}'
      - name: TX_POSITION
        description: '{{ doc("evm_tx_position") }}'
      - name: INPUT_DATA
        description: '{{ doc("evm_tx_input_data") }}'
      - name: GAS_PRICE
        description: '{{ doc("evm_tx_gas_price") }}'
      - name: GAS_USED
        description: '{{ doc("evm_tx_gas_used") }}'
      - name: GAS_LIMIT
        description: '{{ doc("evm_tx_gas_limit") }}'
      - name: CUMULATIVE_GAS_USED
        description: '{{ doc("evm_cumulative_gas_used") }}'
      - name: EFFECTIVE_GAS_PRICE
        description: '{{ doc("evm_effective_gas_price") }}'
      - name: R
        description: '{{ doc("evm_r") }}'
      - name: S
        description: '{{ doc("evm_s") }}'
      - name: V
        description: '{{ doc("evm_v") }}'
      - name: MAX_FEE_PER_GAS
        description: '{{ doc("evm_max_fee_per_gas") }}'
      - name: MAX_PRIORITY_FEE_PER_GAS
        description: '{{ doc("evm_max_priority_fee_per_gas") }}'
      - name: Y_PARITY
        description: '{{ doc("evm_y_parity") }}'
      - name: ACCESS_LIST
        description: '{{ doc("evm_access_list") }}'
      - name: FACT_TRANSACTIONS_ID
        description: '{{ doc("evm_pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("evm_inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("evm_modified_timestamp") }}'

View File

@ -0,0 +1,9 @@
-- Test helper: exposes the entire gold testnet fact blocks model so the
-- full-history dbt test suite (tag: full_evm_test) can run against it.
{{ config(
    tags = ['full_evm_test'],
    materialized = "view"
) }}

SELECT *
FROM {{ ref('testnet__fact_evm_blocks') }}

View File

@ -0,0 +1,145 @@
# dbt test schema for the full-history testnet fact blocks test view.
# Table-level tests: PK uniqueness on BLOCK_NUMBER and a sequence-gap check
# restricted to blocks older than one day (avoids flagging in-flight data).
version: 2

models:
  - name: test_gold_testnet_evm__fact_blocks_full
    description: "This is a view used to test all of the gold testnet fact blocks model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - BLOCK_NUMBER
      - sequence_gaps:
          column_name: BLOCK_NUMBER
          where: BLOCK_TIMESTAMP < CURRENT_DATE - 1
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: NETWORK
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: ^[a-zA-Z0-9_]+$
      - name: TX_COUNT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: SIZE
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: MINER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EXTRA_DATA
        tests:
          - not_null
      - name: PARENT_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: GAS_USED
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: GAS_LIMIT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: DIFFICULTY
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: SHA3_UNCLES
        tests:
          - not_null
      - name: UNCLE_BLOCKS
        tests:
          - not_null
      - name: NONCE
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: RECEIPTS_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: STATE_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TRANSACTIONS_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: LOGS_BLOOM
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: FACT_BLOCKS_ID
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_unique
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ

View File

@ -0,0 +1,16 @@
-- Test helper: gold testnet fact blocks restricted to the recent window
-- (everything above the 72-hour lookback block) for tag: recent_evm_test.
{{ config(
    tags = ['recent_evm_test'],
    materialized = "view"
) }}

WITH lookback AS (
    SELECT block_number
    FROM {{ ref('_evm_testnet_block_lookback') }}
)

SELECT f.*
FROM {{ ref('testnet__fact_evm_blocks') }} f
WHERE f.block_number > (SELECT block_number FROM lookback)

View File

@ -0,0 +1,147 @@
# dbt test schema for the recent-window (last ~3 days) testnet fact blocks
# test view. The sequence-gap check errors only when more than 10 gaps are
# found, tolerating a small amount of in-flight data.
version: 2

models:
  - name: test_gold_testnet_evm__fact_blocks_recent
    description: "This is a view used to test the last three days of gold testnet fact blocks."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - BLOCK_NUMBER
      - sequence_gaps:
          column_name: BLOCK_NUMBER
          config:
            severity: error
            error_if: ">10"
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: NETWORK
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: ^[a-zA-Z0-9_]+$
      - name: TX_COUNT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: SIZE
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: MINER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EXTRA_DATA
        tests:
          - not_null
      - name: PARENT_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: GAS_USED
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: GAS_LIMIT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: DIFFICULTY
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: SHA3_UNCLES
        tests:
          - not_null
      - name: UNCLE_BLOCKS
        tests:
          - not_null
      - name: NONCE
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: RECEIPTS_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: STATE_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TRANSACTIONS_ROOT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: LOGS_BLOOM
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: FACT_BLOCKS_ID
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_unique
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ

View File

@ -0,0 +1,9 @@
-- Test helper: exposes the entire gold testnet fact event logs model so the
-- full-history dbt test suite (tag: full_evm_test) can run against it.
{{ config(
    tags = ['full_evm_test'],
    materialized = "view"
) }}

SELECT *
FROM {{ ref('testnet__fact_evm_event_logs') }}

View File

@ -0,0 +1,101 @@
# dbt test schema for the full-history testnet fact event logs test view.
version: 2

models:
  - name: test_gold_testnet_evm__fact_event_logs_full
    description: "This is a view used to test all of the gold testnet fact event logs model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
            - EVENT_INDEX
      - sequence_gaps:
          partition_by:
            - BLOCK_NUMBER
          column_name: EVENT_INDEX
          where: BLOCK_TIMESTAMP < CURRENT_DATE - 1
      - events_match_txs:
          # Fixed: previously referenced the mainnet view
          # test_gold_evm__fact_transactions_full; this testnet schema must
          # compare against the testnet transactions test view (the recent
          # variant already does so).
          transactions_model: ref('test_gold_testnet_evm__fact_transactions_full')
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
          - fsc_utils.tx_block_count:
              config:
                severity: error
                error_if: "!=0"
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: EVENT_INDEX
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: CONTRACT_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOPICS
        tests:
          - not_null
      - name: DATA
        tests:
          - not_null
      - name: EVENT_REMOVED
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: FACT_EVENT_LOGS_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2

View File

@ -0,0 +1,16 @@
-- Test helper: gold testnet fact event logs restricted to the recent window
-- (everything above the 72-hour lookback block) for tag: recent_evm_test.
{{ config(
    tags = ['recent_evm_test'],
    materialized = "view"
) }}

WITH lookback AS (
    SELECT block_number
    FROM {{ ref('_evm_testnet_block_lookback') }}
)

SELECT f.*
FROM {{ ref('testnet__fact_evm_event_logs') }} f
WHERE f.block_number > (SELECT block_number FROM lookback)

View File

@ -0,0 +1,100 @@
# dbt test schema for the recent-window (last ~3 days) testnet fact event
# logs test view. Events are cross-checked against the recent testnet
# transactions test view.
version: 2

models:
  - name: test_gold_testnet_evm__fact_event_logs_recent
    description: "This is a view used to test the last three days of gold testnet fact event logs."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
            - EVENT_INDEX
      - sequence_gaps:
          partition_by:
            - BLOCK_NUMBER
          column_name: EVENT_INDEX
      - events_match_txs:
          transactions_model: ref('test_gold_testnet_evm__fact_transactions_recent')
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
          - fsc_utils.tx_block_count:
              config:
                severity: error
                error_if: "!=0"
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: EVENT_INDEX
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: CONTRACT_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOPICS
        tests:
          - not_null
      - name: DATA
        tests:
          - not_null
      - name: EVENT_REMOVED
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: FACT_EVENT_LOGS_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2

View File

@ -0,0 +1,9 @@
-- Test helper: exposes the entire gold testnet fact traces model so the
-- full-history dbt test suite (tag: full_evm_test) can run against it.
{{ config(
    tags = ['full_evm_test'],
    materialized = "view"
) }}

SELECT *
FROM {{ ref('testnet__fact_evm_traces') }}

View File

@ -0,0 +1,120 @@
# dbt test schema for the full-history testnet fact traces test view.
# Trace indexes are checked for gaps per transaction; the FROM_ADDRESS
# not_null check excludes SELFDESTRUCT traces, which carry no sender.
version: 2

models:
  - name: test_gold_testnet_evm__fact_traces_full
    description: "This is a view used to test all of the gold testnet fact traces model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
            - TRACE_INDEX
      - sequence_gaps:
          partition_by:
            - TX_HASH
          column_name: TRACE_INDEX
          where: BLOCK_TIMESTAMP < CURRENT_DATE - 1 AND TX_HASH IS NOT NULL
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: TRACE_INDEX
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: FROM_ADDRESS
        tests:
          - not_null:
              where: TYPE <> 'SELFDESTRUCT'
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
              where: TO_ADDRESS IS NOT NULL
      - name: INPUT
        tests:
          - not_null
      - name: TYPE
        tests:
          - not_null
      - name: TRACE_ADDRESS
        tests:
          - not_null
      - name: SUB_TRACES
        tests:
          - not_null
      - name: VALUE
        tests:
          - not_null
      - name: VALUE_PRECISE_RAW
        tests:
          - not_null
      - name: VALUE_PRECISE
        tests:
          - not_null
      - name: VALUE_HEX
        tests:
          - not_null
      - name: GAS
        tests:
          - not_null
      - name: GAS_USED
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: TRACE_SUCCEEDED
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: FACT_TRACES_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2

View File

@ -0,0 +1,16 @@
-- Test helper: gold testnet fact traces restricted to the recent window
-- (everything above the 72-hour lookback block) for tag: recent_evm_test.
{{ config(
    tags = ['recent_evm_test'],
    materialized = "view"
) }}

WITH lookback AS (
    SELECT block_number
    FROM {{ ref('_evm_testnet_block_lookback') }}
)

SELECT f.*
FROM {{ ref('testnet__fact_evm_traces') }} f
WHERE f.block_number > (SELECT block_number FROM lookback)

View File

@ -0,0 +1,120 @@
# dbt test schema for the recent-window (last ~3 days) testnet fact traces
# test view. Same checks as the full-history variant, without the age
# restriction on the sequence-gap test.
version: 2

models:
  - name: test_gold_testnet_evm__fact_traces_recent
    description: "This is a view used to test the last three days of gold testnet fact traces."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
            - TRACE_INDEX
      - sequence_gaps:
          partition_by:
            - TX_HASH
          column_name: TRACE_INDEX
          where: TX_HASH IS NOT NULL
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: TRACE_INDEX
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: FROM_ADDRESS
        tests:
          - not_null:
              where: TYPE <> 'SELFDESTRUCT'
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
              where: TO_ADDRESS IS NOT NULL
      - name: INPUT
        tests:
          - not_null
      - name: TYPE
        tests:
          - not_null
      - name: TRACE_ADDRESS
        tests:
          - not_null
      - name: SUB_TRACES
        tests:
          - not_null
      - name: VALUE
        tests:
          - not_null
      - name: VALUE_PRECISE_RAW
        tests:
          - not_null
      - name: VALUE_PRECISE
        tests:
          - not_null
      - name: VALUE_HEX
        tests:
          - not_null
      - name: GAS
        tests:
          - not_null
      - name: GAS_USED
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: TRACE_SUCCEEDED
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: FACT_TRACES_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2

View File

@ -0,0 +1,9 @@
-- Test helper: exposes the entire gold testnet fact transactions model so
-- the full-history dbt test suite (tag: full_evm_test) can run against it.
{{ config(
    tags = ['full_evm_test'],
    materialized = "view"
) }}

SELECT *
FROM {{ ref('testnet__fact_evm_transactions') }}

View File

@ -0,0 +1,125 @@
# dbt test schema for the full-history testnet fact transactions test view.
# TX_POSITION is checked for gaps per block, and transactions are
# cross-checked against the full-history testnet blocks test view.
version: 2

models:
  - name: test_gold_testnet_evm__fact_transactions_full
    description: "This is a view used to test all of the gold testnet fact transactions model."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
      - sequence_gaps:
          partition_by:
            - BLOCK_NUMBER
          column_name: TX_POSITION
          where: BLOCK_TIMESTAMP < CURRENT_DATE - 1
      - txs_match_blocks:
          blocks_model: ref('test_gold_testnet_evm__fact_blocks_full')
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
              where: TO_ADDRESS IS NOT NULL
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: VALUE
        tests:
          - not_null
      - name: VALUE_PRECISE_RAW
        tests:
          - not_null
      - name: VALUE_PRECISE
        tests:
          - not_null
      - name: TX_FEE
        tests:
          - not_null
      - name: TX_FEE_PRECISE
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: TX_TYPE
        tests:
          - not_null
      - name: NONCE
        tests:
          - not_null
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: INPUT_DATA
        tests:
          - not_null
      - name: GAS_PRICE
        tests:
          - not_null
      - name: GAS_USED
        tests:
          - not_null
      - name: GAS_LIMIT
        tests:
          - not_null
      - name: CUMULATIVE_GAS_USED
        tests:
          - not_null
      - name: EFFECTIVE_GAS_PRICE
        tests:
          - not_null
      - name: R
        tests:
          - not_null
      - name: S
        tests:
          - not_null
      - name: V
        tests:
          - not_null
      - name: FACT_TRANSACTIONS_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2

View File

@ -0,0 +1,16 @@
-- Test helper: gold testnet fact transactions restricted to the recent
-- window (above the 72-hour lookback block) for tag: recent_evm_test.
{{ config(
    tags = ['recent_evm_test'],
    materialized = "view"
) }}

WITH lookback AS (
    SELECT block_number
    FROM {{ ref('_evm_testnet_block_lookback') }}
)

SELECT f.*
FROM {{ ref('testnet__fact_evm_transactions') }} f
WHERE f.block_number > (SELECT block_number FROM lookback)

View File

@ -0,0 +1,124 @@
# dbt test schema for the recent-window (last ~3 days) testnet fact
# transactions test view. Transactions are cross-checked against the recent
# testnet blocks test view.
version: 2

models:
  - name: test_gold_testnet_evm__fact_transactions_recent
    description: "This is a view used to test the last three days of gold testnet fact transactions."
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - TX_HASH
      - sequence_gaps:
          partition_by:
            - BLOCK_NUMBER
          column_name: TX_POSITION
      - txs_match_blocks:
          blocks_model: ref('test_gold_testnet_evm__fact_blocks_recent')
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
              where: TO_ADDRESS IS NOT NULL
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: VALUE
        tests:
          - not_null
      - name: VALUE_PRECISE_RAW
        tests:
          - not_null
      - name: VALUE_PRECISE
        tests:
          - not_null
      - name: TX_FEE
        tests:
          - not_null
      - name: TX_FEE_PRECISE
        tests:
          - not_null
      - name: TX_SUCCEEDED
        tests:
          - not_null
      - name: TX_TYPE
        tests:
          - not_null
      - name: NONCE
        tests:
          - not_null
      - name: TX_POSITION
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: INPUT_DATA
        tests:
          - not_null
      - name: GAS_PRICE
        tests:
          - not_null
      - name: GAS_USED
        tests:
          - not_null
      - name: GAS_LIMIT
        tests:
          - not_null
      - name: CUMULATIVE_GAS_USED
        tests:
          - not_null
      - name: EFFECTIVE_GAS_PRICE
        tests:
          - not_null
      - name: R
        tests:
          - not_null
      - name: S
        tests:
          - not_null
      - name: V
        tests:
          - not_null
      - name: FACT_TRANSACTIONS_ID
        tests:
          - not_null
      - name: INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: hour
              interval: 2

View File

@ -1,5 +1,6 @@
{{ config (
materialized = "incremental",
incremental_predicates = ["dynamic_range_predicate", "block_timestamp::DATE"],
unique_key = "created_contract_address",
merge_exclude_columns = ["inserted_timestamp"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(block_timestamp, tx_hash, created_contract_address, creator_address), SUBSTRING(created_contract_address, creator_address)",

View File

@ -0,0 +1,42 @@
-- depends_on: {{ ref('bronze_evm__testnet_blocks') }}
-- depends_on: {{ ref('bronze_evm__FR_testnet_blocks') }}
{{ config (
    materialized = "incremental",
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['modified_timestamp::DATE','partition_key'],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['evm_testnet']
) }}
-- Silver testnet blocks: lift the raw RPC payload (DATA:result) out of the
-- bronze layer, keep the newest copy of each block, and stamp surrogate key
-- plus audit columns. Incremental runs read the streaming bronze view; full
-- refreshes read the FR (full-reload) view, matching the sibling testnet
-- silver models which declare both refs via -- depends_on (added above for
-- consistency; the FR ref only appears inside the Jinja else branch).
WITH bronze_blocks AS (
    SELECT
        block_number,
        partition_key,
        DATA :result AS block_json,
        _inserted_timestamp
    FROM
{% if is_incremental() %}
    {{ ref('bronze_evm__testnet_blocks') }}
    WHERE
        -- COALESCE guards the watermark against an empty target table
        _inserted_timestamp >= (
            SELECT
                COALESCE(MAX(_inserted_timestamp), '1900-01-01' :: TIMESTAMP) AS _inserted_timestamp
            FROM
                {{ this }}
        )
        AND DATA :result IS NOT NULL
{% else %}
    {{ ref('bronze_evm__FR_testnet_blocks') }}
    WHERE
        DATA :result IS NOT NULL
{% endif %}
)
SELECT
    block_number,
    partition_key,
    block_json,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS blocks_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    bronze_blocks
-- keep only the most recently ingested copy of each block
QUALIFY ROW_NUMBER() OVER (PARTITION BY blocks_id ORDER BY _inserted_timestamp DESC) = 1

View File

@ -0,0 +1,61 @@
-- depends_on: {{ ref('bronze_evm__testnet_receipts') }}
-- depends_on: {{ ref('bronze_evm__FR_testnet_receipts') }}
{{ config (
    materialized = "incremental",
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['modified_timestamp::DATE','partition_key'],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['evm_testnet']
) }}
-- Silver testnet receipts: take the newest receipt batch per block from
-- bronze, flatten the DATA:result array into one row per receipt, and stamp
-- surrogate key plus audit columns.
WITH bronze AS (
    SELECT
        block_number,
        DATA,
        partition_key,
        _inserted_timestamp
    FROM
{% if is_incremental() %}
    {{ ref('bronze_evm__testnet_receipts') }}
    WHERE
        -- Fix: wrap MAX in COALESCE so an empty/rebuilt target does not
        -- yield a NULL watermark (which would filter out every row).
        -- Matches the sibling testnet silver models.
        _inserted_timestamp >= (
            SELECT
                COALESCE(MAX(_inserted_timestamp), '1900-01-01' :: TIMESTAMP) AS _inserted_timestamp
            FROM
                {{ this }}
        )
        AND DATA :result [0] IS NOT NULL
{% else %}
    {{ ref('bronze_evm__FR_testnet_receipts') }}
    WHERE
        DATA :result [0] IS NOT NULL
{% endif %}
    -- newest ingested batch per block wins
    qualify(ROW_NUMBER() over (PARTITION BY block_number
    ORDER BY
        _inserted_timestamp DESC)) = 1
),
flat_receipts AS (
    SELECT
        block_number,
        partition_key,
        INDEX :: INT AS array_index,
        VALUE AS receipts_json,
        _inserted_timestamp
    FROM
        bronze,
        LATERAL FLATTEN(input => DATA :result) AS receipt
)
SELECT
    block_number,
    partition_key,
    array_index,
    receipts_json,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(['block_number','array_index']) }} AS receipts_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    flat_receipts
QUALIFY(ROW_NUMBER() OVER (PARTITION BY block_number, array_index ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,141 @@
-- depends_on: {{ ref('bronze_evm__testnet_traces') }}
-- depends_on: {{ ref('bronze_evm__FR_testnet_traces') }}
{{ config (
    materialized = "incremental",
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['modified_timestamp::DATE','partition_key'],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['evm_testnet']
) }}
-- Silver testnet traces: recursively flatten each transaction's debug-trace
-- call tree (DATA:result) into one row per call frame, keyed by a
-- '_'-separated trace address ('ORIGIN' for the top-level frame).
WITH bronze_traces AS (
    SELECT
        -- NOTE(review): block_number/tx_position come out of the bronze VALUE
        -- variant ("BLOCK_NUMBER", array_index) — presumably streamline
        -- request metadata; confirm against the bronze view definition.
        value:"BLOCK_NUMBER"::INT AS block_number,
        partition_key,
        value:array_index::INT AS tx_position,
        DATA :result AS full_traces,
        _inserted_timestamp
    FROM
{% if is_incremental()%}
    {{ ref('bronze_evm__testnet_traces') }}
    WHERE
        DATA :result IS NOT NULL
        -- COALESCE guards the watermark against an empty target table
        AND _inserted_timestamp >= (
            SELECT
                COALESCE(MAX(_inserted_timestamp), '1900-01-01') _inserted_timestamp
            FROM
                {{ this }}
        )
{% else %}
    {{ ref('bronze_evm__FR_testnet_traces') }}
    WHERE DATA :result IS NOT NULL
{% endif %}
    -- newest ingested copy per (block, tx) wins
    qualify(ROW_NUMBER() over (PARTITION BY block_number, tx_position
    ORDER BY
        _inserted_timestamp DESC)) = 1
),
flatten_traces AS (
    SELECT
        block_number,
        tx_position,
        partition_key,
        -- Paths that belong to the top-level result object map to the
        -- synthetic 'ORIGIN' frame; nested 'calls[i]...' paths collapse to a
        -- numeric address like '0_1' (indices joined by '_').
        IFF(
            path IN (
                'result',
                'result.value',
                'result.type',
                'result.to',
                'result.input',
                'result.gasUsed',
                'result.gas',
                'result.from',
                'result.output',
                'result.error',
                'result.revertReason',
                'result.time',
                'gasUsed',
                'gas',
                'type',
                'to',
                'from',
                'value',
                'input',
                'error',
                'output',
                'time',
                'revertReason'
            ),
            'ORIGIN',
            REGEXP_REPLACE(REGEXP_REPLACE(path, '[^0-9]+', '_'), '^_|_$', '')
        ) AS trace_address,
        _inserted_timestamp,
        -- reassemble each frame's scalar fields into one JSON object
        OBJECT_AGG(
            key,
            VALUE
        ) AS trace_json,
        -- parent of 'a_b_c' is 'a_b'; single-index frames parent to ORIGIN;
        -- ORIGIN itself has no parent (Snowflake allows reusing the
        -- trace_address alias within the same SELECT)
        CASE
            WHEN trace_address = 'ORIGIN' THEN NULL
            WHEN POSITION(
                '_' IN trace_address
            ) = 0 THEN 'ORIGIN'
            ELSE REGEXP_REPLACE(
                trace_address,
                '_[0-9]+$',
                '',
                1,
                1
            )
        END AS parent_trace_address,
        SPLIT(
            trace_address,
            '_'
        ) AS trace_address_array
    FROM
        bronze_traces txs,
        TABLE(
            FLATTEN(
                input => PARSE_JSON(
                    txs.full_traces
                ),
                recursive => TRUE
            )
        ) f
    WHERE
        -- keep only scalar leaf entries: skip array elements, the nested
        -- 'calls' containers, and the bare top-level 'result' object
        f.index IS NULL
        AND f.key != 'calls'
        AND f.path != 'result'
    GROUP BY
        block_number,
        tx_position,
        partition_key,
        trace_address,
        _inserted_timestamp
)
SELECT
    block_number,
    tx_position,
    trace_address,
    parent_trace_address,
    trace_address_array,
    trace_json,
    partition_key,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(
        ['block_number'] +
        ['tx_position'] +
        ['trace_address']
    ) }} AS traces_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    -- keep only the most recently ingested copy of each trace frame
    flatten_traces qualify(ROW_NUMBER() over(PARTITION BY traces_id
ORDER BY
    _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,53 @@
-- depends_on: {{ ref('bronze_evm__testnet_blocks') }}
-- depends_on: {{ ref('bronze_evm__FR_testnet_blocks') }}
{{ config (
    materialized = "incremental",
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['modified_timestamp::DATE','partition_key'],
    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
    tags = ['evm_testnet']
) }}
-- Silver testnet transactions: explode each bronze block's embedded
-- transactions array (DATA:result:transactions) into one row per
-- transaction, keyed by (block_number, tx_position). The FR ref only
-- appears inside the Jinja else branch, so it is declared via -- depends_on
-- above, matching the sibling receipts/traces models.
WITH flat_txs AS (
    SELECT
        block_number,
        partition_key,
        _inserted_timestamp,
        DATA
    FROM
{% if is_incremental() %}
    {{ ref('bronze_evm__testnet_blocks') }}
    WHERE
        -- COALESCE guards the watermark against an empty target table
        _inserted_timestamp >= (
            SELECT
                COALESCE(MAX(_inserted_timestamp), '1900-01-01' :: TIMESTAMP) AS _inserted_timestamp
            FROM
                {{ this }}
        )
        AND DATA :result :transactions [0] IS NOT NULL
{% else %}
    {{ ref('bronze_evm__FR_testnet_blocks') }}
    WHERE
        DATA :result :transactions [0] IS NOT NULL
{% endif %}
),
bronze_transactions AS (
    SELECT
        block_number,
        partition_key,
        INDEX :: INT AS tx_position,
        VALUE AS transaction_json,
        _inserted_timestamp
    FROM
        flat_txs,
        LATERAL FLATTEN(input => DATA :result :transactions) AS tx
)
SELECT
    block_number,
    partition_key,
    tx_position,
    transaction_json,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(['block_number','tx_position']) }} AS transactions_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    bronze_transactions
-- keep only the most recently ingested copy of each transaction
QUALIFY ROW_NUMBER() OVER (PARTITION BY transactions_id ORDER BY _inserted_timestamp DESC) = 1

View File

@ -2,6 +2,7 @@
-- depends_on: {{ ref('bronze_evm__FR_blocks') }}
{{ config (
materialized = "incremental",
incremental_predicates = ["dynamic_range_predicate", "partition_key"],
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
merge_exclude_columns = ["inserted_timestamp"],

View File

@ -2,6 +2,7 @@
-- depends_on: {{ ref('bronze_evm__FR_receipts') }}
{{ config (
materialized = "incremental",
incremental_predicates = ["dynamic_range_predicate", "partition_key"],
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
merge_exclude_columns = ["inserted_timestamp"],

View File

@ -2,6 +2,7 @@
-- depends_on: {{ ref('bronze_evm__FR_traces') }}
{{ config (
materialized = "incremental",
incremental_predicates = ["dynamic_range_predicate", "partition_key"],
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
merge_exclude_columns = ["inserted_timestamp"],

View File

@ -0,0 +1,11 @@
-- Ephemeral lookback anchor: the lowest testnet block whose timestamp falls
-- inside the one-hour window that starts exactly 72 hours ago (hour-aligned).
-- Downstream "recent" test views filter on block_number > this value.
{{ config(
    materialized = "ephemeral"
) }}

WITH bounds AS (
    SELECT
        DATEADD('hour', -72, TRUNCATE(SYSDATE(), 'HOUR')) AS window_start,
        DATEADD('hour', -71, TRUNCATE(SYSDATE(), 'HOUR')) AS window_end
)

SELECT
    MIN(b.block_number) AS block_number
FROM
    {{ ref("testnet__fact_evm_blocks") }} b
    CROSS JOIN bounds w
WHERE
    b.block_timestamp >= w.window_start
    AND b.block_timestamp < w.window_end

View File

@ -0,0 +1,44 @@
-- depends_on: {{ ref('bronze_evm__testnet_blocks') }}
-- depends_on: {{ ref('bronze_evm__FR_testnet_blocks') }}
-- Tracking table of testnet block heights already landed in the streamline
-- blocks external table; realtime views EXCEPT against this to find gaps.
{{ config (
materialized = "incremental",
incremental_predicates = ["dynamic_range_predicate", "partition_key"],
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
merge_exclude_columns = ["inserted_timestamp"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_complete_evm_testnet']
) }}
SELECT
block_number,
-- Block height decoded from the RPC payload itself; NULL when the node
-- returned no result, which downstream realtime views use to retry.
utils.udf_hex_to_int(DATA :result :number :: STRING) as blockNumber,
partition_key,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_number::STRING']
) }} AS complete_evm_blocks_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
-- Incremental runs scan only bronze rows newer than the latest load;
-- full refresh reads the FR (full replay) bronze view instead.
{% if is_incremental() %}
{{ ref('bronze_evm__testnet_blocks') }}
WHERE
_inserted_timestamp >= COALESCE(
(
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
),
'1900-01-01' :: timestamp_ntz
)
{% else %}
{{ ref('bronze_evm__FR_testnet_blocks') }}
{% endif %}
-- Keep only the most recently ingested copy per block height.
qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,43 @@
-- depends_on: {{ ref('bronze_evm__testnet_receipts') }}
-- depends_on: {{ ref('bronze_evm__FR_testnet_receipts') }}
-- Tracking table of testnet block heights whose receipts have been ingested;
-- the realtime receipts view EXCEPTs against this to find missing blocks.
{{ config (
materialized = "incremental",
incremental_predicates = ["dynamic_range_predicate", "partition_key"],
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
merge_exclude_columns = ["inserted_timestamp"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_complete_evm_testnet']
) }}
SELECT
block_number,
partition_key,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_number::STRING']
) }} AS complete_evm_receipts_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
-- Incremental runs scan only bronze rows newer than the latest load;
-- full refresh reads the FR (full replay) bronze view instead.
{% if is_incremental() %}
{{ ref('bronze_evm__testnet_receipts') }}
WHERE
_inserted_timestamp >= COALESCE(
(
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
),
'1900-01-01' :: timestamp_ntz
)
{% else %}
{{ ref('bronze_evm__FR_testnet_receipts') }}
{% endif %}
-- Keep only the most recently ingested copy per block height.
qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,45 @@
-- depends_on: {{ ref('bronze_evm__testnet_traces') }}
-- depends_on: {{ ref('bronze_evm__FR_testnet_traces') }}
-- Tracking table of testnet block heights whose debug traces have been
-- ingested; the realtime traces view EXCEPTs against this to find gaps.
{{ config (
materialized = "incremental",
incremental_predicates = ["dynamic_range_predicate", "partition_key"],
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
merge_exclude_columns = ["inserted_timestamp"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_complete_evm_testnet']
) }}
SELECT
-- Traces are exploded on "result" at ingest (see the realtime view's
-- exploded_key param), so the height lives under value:"BLOCK_NUMBER"
-- rather than in a top-level block_number column.
value:"BLOCK_NUMBER"::INT AS block_number,
partition_key,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_number::STRING']
) }} AS complete_evm_traces_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
-- Incremental runs scan only bronze rows newer than the latest load;
-- full refresh reads the FR (full replay) bronze view instead.
{% if is_incremental() %}
{{ ref('bronze_evm__testnet_traces') }}
WHERE
_inserted_timestamp >= COALESCE(
(
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
),
'1900-01-01' :: timestamp_ntz
)
{% else %}
{{ ref('bronze_evm__FR_testnet_traces') }}
WHERE
TRUE
{% endif %}
-- Keep only the most recently ingested copy per block height.
qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,89 @@
-- Realtime request model: when this view returns rows, the post_hook fires
-- streamline's bulk REST API UDF to issue one eth_getBlockByNumber call per
-- missing testnet block, landing results in the evm_testnet_blocks
-- external table.
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"evm_testnet_blocks",
"sql_limit" :"25000",
"producer_batch_size" :"5000",
"worker_batch_size" :"1000",
"sql_source" :"{{this.identifier}}" }
),
tags = ['streamline_realtime_evm_testnet']
) }}
-- Scan lower bound: the ~72h lookback block, floored at 67,860,000
-- (presumably the first relevant EVM testnet height — confirm with owners).
-- ZEROIFNULL guards an empty lookback result.
WITH last_3_days AS (
SELECT
GREATEST(ZEROIFNULL(block_number), 67860000) AS block_number
FROM
{{ ref("_evm_testnet_block_lookback") }}
),
-- Candidate heights in range, minus those already recorded as complete
-- with a non-null decoded blockNumber (i.e. the RPC actually returned
-- a block; null-result rows are retried).
tbl AS (
SELECT
block_number
FROM
{{ ref('streamline__evm_testnet_blocks') }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND block_number IS NOT NULL
EXCEPT
SELECT
block_number
FROM
{{ ref('streamline__complete_get_evm_testnet_blocks') }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
AND blockNumber IS NOT NULL
)
-- One JSON-RPC request row per missing block; partition_key groups
-- heights into thousands to match the external table's partitioning.
SELECT
block_number,
DATE_PART(epoch_second, SYSDATE()) :: STRING AS request_timestamp,
'{{ invocation_id }}' AS _invocation_id,
ROUND(
block_number,
-3
) :: INT AS partition_key,
{{ target.database }}.live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'eth_getBlockByNumber',
'params',
ARRAY_CONSTRUCT(
utils.udf_int_to_hex(block_number),
TRUE -- Include transactions
)
),
'Vault/prod/flow/quicknode/testnet'
) AS request
FROM
tbl
ORDER BY
block_number DESC

View File

@ -0,0 +1,99 @@
-- Realtime request model: when this view returns rows, the post_hook fires
-- streamline's bulk REST API UDF to issue one eth_getBlockReceipts call per
-- missing testnet block, landing results in the evm_testnet_receipts
-- external table.
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"evm_testnet_receipts",
"sql_limit" :"25000",
"producer_batch_size" :"5000",
"worker_batch_size" :"1000",
"sql_source" :"{{this.identifier}}" }
),
tags = ['streamline_realtime_evm_testnet']
) }}
-- Scan lower bound: the ~72h lookback block, floored at 67,860,000
-- (presumably the first relevant EVM testnet height — confirm with owners).
WITH last_3_days AS (
SELECT
GREATEST(ZEROIFNULL(block_number), 67860000) AS block_number
FROM
{{ ref("_evm_testnet_block_lookback") }}
),
-- Candidate heights in range, minus those already recorded complete
-- within the last 4 days.
tbl AS (
SELECT
block_number
FROM
{{ ref('streamline__evm_testnet_blocks') }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND block_number IS NOT NULL
EXCEPT
SELECT
block_number
FROM
{{ ref('streamline__complete_get_evm_testnet_receipts') }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
),
-- Also re-request blocks whose transactions still lack receipt-derived
-- fields (see _missing_testnet_receipts).
ready_blocks AS (
SELECT
block_number
FROM
tbl
UNION ALL
SELECT
block_number
FROM
{{ ref("_missing_testnet_receipts") }}
)
-- One JSON-RPC request row per block; partition_key groups heights into
-- thousands to match the external table's partitioning.
SELECT
block_number,
DATE_PART(epoch_second, SYSDATE())::STRING AS request_timestamp,
'{{ invocation_id }}' AS _invocation_id,
ROUND(
block_number,
-3
) :: INT AS partition_key,
{{ target.database }}.live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'eth_getBlockReceipts',
'params',
ARRAY_CONSTRUCT(
utils.udf_int_to_hex(block_number)
)
),
'Vault/prod/flow/quicknode/testnet'
) AS request
FROM
ready_blocks
ORDER BY
block_number DESC

View File

@ -0,0 +1,101 @@
-- Realtime request model: when this view returns rows, the post_hook fires
-- streamline's bulk REST API UDF to issue one debug_traceBlockByNumber call
-- per missing testnet block. Responses are exploded on "result" before
-- landing in the evm_testnet_traces external table (one row per trace).
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"evm_testnet_traces",
"sql_limit" :"25000",
"producer_batch_size" :"2000",
"worker_batch_size" :"1000",
"sql_source" :"{{this.identifier}}",
"exploded_key": tojson(["result"])}
),
tags = ['streamline_realtime_evm_testnet']
) }}
-- Scan lower bound: the ~72h lookback block, floored at 67,860,000
-- (presumably the first relevant EVM testnet height — confirm with owners).
WITH last_3_days AS (
SELECT
GREATEST(ZEROIFNULL(block_number), 67860000) AS block_number
FROM
{{ ref("_evm_testnet_block_lookback") }}
),
-- Candidate heights in range, minus those already recorded complete
-- within the last 4 days.
tbl AS (
SELECT
block_number
FROM
{{ ref('streamline__evm_testnet_blocks') }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND block_number IS NOT NULL
EXCEPT
SELECT
block_number
FROM
{{ ref('streamline__complete_get_evm_testnet_traces') }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
),
ready_blocks AS (
SELECT
block_number
FROM
tbl
)
-- One JSON-RPC request row per block, using the callTracer with a 180s
-- node-side timeout; partition_key groups heights into thousands.
SELECT
block_number,
DATE_PART(epoch_second, SYSDATE())::STRING AS request_timestamp,
'{{ invocation_id }}' AS _invocation_id,
ROUND(
block_number,
-3
) :: INT AS partition_key,
{{ target.database }}.live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'debug_traceBlockByNumber',
'params',
ARRAY_CONSTRUCT(
utils.udf_int_to_hex(block_number),
OBJECT_CONSTRUCT(
'tracer', 'callTracer',
'timeout', '180s'
)
)
),
'Vault/prod/flow/quicknode/testnet'
) AS request
FROM
ready_blocks
ORDER BY
block_number DESC
-- Hard cap mirrors the post_hook's sql_limit of 25000.
limit 25000

View File

@ -0,0 +1,16 @@
{{ config (
    materialized = "ephemeral"
) }}

-- Blocks inside the lookback window whose transactions still lack
-- receipt-derived data (tx_succeeded is NULL until receipts land); the
-- realtime receipts view re-requests these heights.
WITH lookback AS (

    SELECT
        block_number
    FROM
        {{ ref("_evm_testnet_block_lookback") }}
)
SELECT DISTINCT
    txs.block_number AS block_number
FROM
    {{ ref("testnet__fact_evm_transactions") }} txs
WHERE
    txs.tx_succeeded IS NULL
    AND txs.block_number > (
        SELECT
            block_number
        FROM
            lookback
    )

View File

@ -0,0 +1,14 @@
{{ config(
    materialized = "view",
    tags = ['streamline_realtime_evm_testnet']
) }}

-- Every candidate testnet block height from the shared crosschain number
-- sequence, capped at the current chainhead.
WITH chainhead AS (

    SELECT
        block_number
    FROM
        {{ ref('streamline__evm_testnet_chainhead') }}
)
SELECT
    seq._id AS block_number
FROM
    {{ source(
        'silver_crosschain',
        'number_sequence'
    ) }} seq
WHERE
    seq._id <= (
        SELECT
            block_number
        FROM
            chainhead
    )

View File

@ -0,0 +1,28 @@
-- Rebuilt as a table each run: captures the current testnet chainhead by
-- calling eth_blockNumber against the QuickNode testnet endpoint.
{{ config (
materialized = 'table',
tags = ['streamline_realtime_evm_testnet']
) }}
SELECT
live.udf_api(
'POST',
'{Service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'LiveQuery'
),
OBJECT_CONSTRUCT(
'id',
0,
'jsonrpc',
'2.0',
'method',
'eth_blockNumber',
'params',
[]
),
'Vault/prod/flow/quicknode/testnet'
) AS resp,
-- Decodes the hex height from the RPC response; reuses the resp alias
-- from the same SELECT list (Snowflake lateral column alias).
utils.udf_hex_to_int(
resp :data :result :: STRING
) AS block_number

View File

@ -1,5 +1,6 @@
{{ config (
materialized = 'incremental',
incremental_predicates = ["dynamic_range_predicate", "block_timestamp_associated::DATE"],
incremental_strategy = 'merge',
merge_exclude_columns = ['inserted_timestamp'],
unique_key = 'dim_address_mapping_id',

View File

@ -2,7 +2,7 @@
materialized = 'incremental',
incremental_strategy = 'merge',
merge_exclude_columns = ['inserted_timestamp'],
incremental_predicates = ["COALESCE(DBT_INTERNAL_DEST.block_timestamp::DATE,'2099-12-31') >= (select min(block_timestamp::DATE) from " ~ generate_tmp_view_name(this) ~ ")"],
incremental_predicates = ["dynamic_range_predicate", "block_timestamp::DATE"],
cluster_by = ['block_timestamp::date', 'modified_timestamp::date'],
unique_key = "ez_token_transfers_id",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_id,sender,recipient,token_contract);",

View File

@ -54,7 +54,8 @@ models:
- name: TOKEN_CONTRACT
description: "{{ doc('token_contract') }}"
tests:
- not_null
- not_null:
where: tx_succeeded
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING

View File

@ -3,7 +3,7 @@
unique_key = 'ez_transaction_actors_id',
incremental_strategy = 'merge',
merge_exclude_columns = ['inserted_timestamp'],
incremental_predicates = ["COALESCE(DBT_INTERNAL_DEST.block_timestamp::DATE,'2099-12-31') >= (select min(block_timestamp::DATE) from " ~ generate_tmp_view_name(this) ~ ")"],
incremental_predicates = ["dynamic_range_predicate", "block_timestamp::DATE"],
cluster_by = 'block_timestamp::date',
post_hook = 'ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_id,actors);',
tags = ['scheduled_non_core']

View File

@ -5,6 +5,7 @@
materialized = 'incremental',
unique_key = "block_number",
incremental_strategy = 'merge',
incremental_predicates = ["dynamic_range_predicate", "_partition_by_block_id"],
merge_exclude_columns = ["inserted_timestamp"],
cluster_by = "block_timestamp::date",
tags = ['streamline_load', 'core', 'scheduled_core']

View File

@ -3,6 +3,7 @@
materialized = 'incremental',
unique_key = "collection_id",
incremental_strategy = 'merge',
incremental_predicates = ["dynamic_range_predicate", "block_number"],
merge_exclude_columns = ["inserted_timestamp"],
cluster_by = ['_inserted_timestamp :: DATE', 'block_number'],
tags = ['streamline_load', 'core', 'scheduled_core']

View File

@ -2,6 +2,7 @@
materialized = 'incremental',
unique_key = 'event_id',
incremental_strategy = 'merge',
incremental_predicates = ["dynamic_range_predicate", "block_timestamp::date"],
merge_exclude_columns = ["inserted_timestamp"],
cluster_by = "block_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_id,event_id,event_contract,event_type);",

View File

@ -1,7 +1,7 @@
-- depends_on: {{ ref('bronze__streamline_transaction_results') }}
{{ config(
materialized = 'incremental',
incremental_predicates = ['DBT_INTERNAL_DEST.block_number >= (select min(block_number) from ' ~ generate_tmp_view_name(this) ~ ')'],
incremental_predicates = ["dynamic_range_predicate", "_partition_by_block_id"],
unique_key = "tx_id",
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],

View File

@ -3,6 +3,7 @@
materialized = 'incremental',
unique_key = "tx_id",
incremental_strategy = 'merge',
incremental_predicates = ["dynamic_range_predicate", "_partition_by_block_id"],
merge_exclude_columns = ["inserted_timestamp"],
cluster_by = "_inserted_timestamp::date",
tags = ['streamline_load', 'core', 'scheduled_core']

View File

@ -3,6 +3,7 @@
materialized = 'incremental',
unique_key = "tx_id",
incremental_strategy = 'merge',
incremental_predicates = ["dynamic_range_predicate", "_partition_by_block_id"],
merge_exclude_columns = ["inserted_timestamp"],
cluster_by = "block_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_id,proposer,payer,authorizers);",

View File

@ -1,5 +1,6 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ["dynamic_range_predicate", "block_timestamp::DATE"],
unique_key = 'flow_evm_address_map_id',
incremental_strategy = 'merge',
merge_exclude_columns = ['inserted_timestamp'],

View File

@ -1,5 +1,6 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ["dynamic_range_predicate", "block_timestamp::DATE"],
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
cluster_by = ['inserted_timestamp::DATE'],

View File

@ -1,5 +1,6 @@
{{ config(
materialized = 'incremental',
incremental_predicates = ["dynamic_range_predicate", "block_timestamp::DATE"],
incremental_strategy = 'merge',
merge_exclude_columns = ["inserted_timestamp"],
cluster_by = ['block_timestamp::date'],

View File

@ -2,7 +2,7 @@
materialized = 'incremental',
incremental_strategy = 'merge',
merge_exclude_columns = ['inserted_timestamp'],
incremental_predicates = ["COALESCE(DBT_INTERNAL_DEST.block_timestamp::DATE,'2099-12-31') >= (select min(block_timestamp::DATE) from " ~ generate_tmp_view_name(this) ~ ")"],
incremental_predicates = ["dynamic_range_predicate", "block_timestamp::DATE"],
cluster_by = ['block_timestamp::date', 'modified_timestamp::date'],
unique_key = "token_transfers_id",
tags = ['scheduled_non_core']

View File

@ -1,13 +1,6 @@
version: 2
sources:
- name: prod
database: chainwalkers
schema: prod
tables:
- name: flow_blocks
- name: flow_txs
- name: bronze_streamline
database: streamline
schema: |
@ -133,6 +126,9 @@ sources:
- name: evm_traces_v2
- name: evm_decoded_logs
- name: topshot_metadata
- name: evm_testnet_blocks
- name: evm_testnet_receipts
- name: evm_testnet_traces
- name: crosschain_silver
database: crosschain

View File

@ -25,11 +25,11 @@
ROW_NUMBER() over (
ORDER BY
partition_key DESC,
INDEX ASC
INDEX DESC
) AS rn
FROM
{{ ref('silver_api__transaction_entries') }}
WHERE _inserted_timestamp >= SYSDATE() - INTERVAL '3 days'
WHERE _inserted_timestamp >= SYSDATE() - INTERVAL '14 days'
)
SELECT
entry_id
@ -49,7 +49,7 @@
SELECT
{{ var(
'API_LIMIT',
1000
100
) }} AS api_limit,
'{{ starting_after }}' AS starting_after,
DATE_PART('EPOCH', SYSDATE()) :: INTEGER AS partition_key,

View File

@ -6,11 +6,11 @@ packages:
- package: dbt-labs/dbt_utils
version: 1.0.0
- git: https://github.com/FlipsideCrypto/fsc-utils.git
revision: d3cf679e079f0cf06142de9386f215e55fe26b3b
revision: 87e00eb90acddcc7a34aa8e67e3b3bac86b262e6
- package: get-select/dbt_snowflake_query_tags
version: 2.5.0
- package: calogica/dbt_date
version: 0.7.2
- git: https://github.com/FlipsideCrypto/livequery-models.git
revision: b024188be4e9c6bc00ed77797ebdc92d351d620e
sha1_hash: 3fb8d6ca492a03f5aef6f281508aaa0b34c989d4
revision: 2651a45b7e123f7bd421bcc0e7e2a7bcbaf7652f
sha1_hash: a1cc3545d7ef13fcf5b3908a9e888b4421018792

View File

@ -6,6 +6,6 @@ packages:
- package: dbt-labs/dbt_utils
version: 1.0.0
- git: https://github.com/FlipsideCrypto/fsc-utils.git
revision: v1.32.0
revision: v1.35.1
- package: get-select/dbt_snowflake_query_tags
version: [">=2.0.0", "<3.0.0"]