AN-5796/sl2-upgrade-blast (#102)

* initial model updates

* bronze and sources

* integration test

* revert integration
drethereum 2025-03-12 16:34:06 -06:00 committed by GitHub
parent e902cb402d
commit e46327a3e9
88 changed files with 2625 additions and 1498 deletions

View File

@@ -43,4 +43,8 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "blast_models,tag:streamline_core_complete" "blast_models,tag:streamline_core_realtime"
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "blast_models,tag:streamline_core_complete" "blast_models,tag:streamline_core_realtime" "blast_models,tag:streamline_core_complete_receipts" "blast_models,tag:streamline_core_realtime_receipts" "blast_models,tag:streamline_core_complete_confirm_blocks" "blast_models,tag:streamline_core_realtime_confirm_blocks"
- name: Run Chainhead Tests
run: |
dbt test -m "blast_models,tag:chainhead"

View File

@@ -29,7 +29,7 @@ on:
description: 'DBT Run Command'
required: true
options:
- dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "blast_models,tag:streamline_core_history" "blast_models,tag:streamline_core_complete"
- dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "blast_models,tag:streamline_core_complete" "blast_models,tag:streamline_core_history" "blast_models,tag:streamline_core_complete_receipts" "blast_models,tag:streamline_core_history_receipts" "blast_models,tag:streamline_core_complete_confirm_blocks" "blast_models,tag:streamline_core_history_confirm_blocks"
env:
DBT_PROFILES_DIR: ./

View File

@@ -80,12 +80,70 @@ vars:
dev:
API_INTEGRATION: AWS_BLAST_API_DEV
EXTERNAL_FUNCTION_URI: y9d0tuavh6.execute-api.us-east-1.amazonaws.com/stg/
ROLES:
- AWS_LAMBDA_BLAST_API
- INTERNAL_DEV
prod:
API_INTEGRATION: AWS_BLAST_API
EXTERNAL_FUNCTION_URI: 42gzudc5si.execute-api.us-east-1.amazonaws.com/prod/
ROLES:
- AWS_LAMBDA_BLAST_API
- INTERNAL_DEV
- DBT_CLOUD_BLAST
#### STREAMLINE 2.0 END ####
### FSC_EVM
#### FSC_EVM BEGIN ####
# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables
### GLOBAL VARIABLES BEGIN ###
## REQUIRED
GLOBAL_PROD_DB_NAME: 'blast'
GLOBAL_NODE_SECRET_PATH: 'Vault/prod/blast/quicknode/mainnet'
GLOBAL_BLOCKS_PER_HOUR: 1800
### GLOBAL VARIABLES END ###
### MAIN_PACKAGE VARIABLES BEGIN ###
### CORE ###
## REQUIRED
## OPTIONAL
# GOLD_FULL_REFRESH: True
# SILVER_FULL_REFRESH: True
# BRONZE_FULL_REFRESH: True
# BLOCKS_COMPLETE_FULL_REFRESH: True
# CONFIRM_BLOCKS_COMPLETE_FULL_REFRESH: True
# TRACES_COMPLETE_FULL_REFRESH: True
# RECEIPTS_COMPLETE_FULL_REFRESH: True
# TRANSACTIONS_COMPLETE_FULL_REFRESH: True
# BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT: 3
# BLOCKS_TRANSACTIONS_HISTORY_TESTING_LIMIT: 3
# TRACES_REALTIME_TESTING_LIMIT: 3
# TRACES_HISTORY_TESTING_LIMIT: 3
# ARBTRACE_BLOCK_HISTORY_TESTING_LIMIT: 3
# RECEIPTS_REALTIME_TESTING_LIMIT: 3
# RECEIPTS_HISTORY_TESTING_LIMIT: 3
# CONFIRM_BLOCKS_REALTIME_TESTING_LIMIT: 3
# CONFIRM_BLOCKS_HISTORY_TESTING_LIMIT: 3
# ### MAIN_PACKAGE VARIABLES END ###
# ### DECODER_PACKAGE VARIABLES BEGIN ###
# ## REQUIRED
# ## OPTIONAL
# DECODED_LOGS_COMPLETE_FULL_REFRESH: True
# DECODED_LOGS_REALTIME_TESTING_LIMIT: 3
# DECODED_LOGS_HISTORY_SQL_LIMIT: 1 #limit per monthly range
### DECODER_PACKAGE VARIABLES END ###
#### FSC_EVM END ####
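The commented-out optionals above take effect without editing this file; they can be passed at invocation time through dbt's --vars flag, the same way the workflows above pass STREAMLINE_INVOKE_STREAMS. An illustrative run, reusing a variable name and tag that appear in this PR:

dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True,"RECEIPTS_REALTIME_TESTING_LIMIT":3}' -m "blast_models,tag:streamline_core_realtime_receipts"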

View File

@@ -1,20 +1,18 @@
{% macro decoded_logs_history(backfill_mode=false) %}
{%- set params = {
"sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 7500000),
"sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000),
"producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000),
"worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000)
} -%}
{% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %}
{% set find_months_query %}
SELECT
DISTINCT date_trunc('month', block_timestamp)::date as month
FROM {{ ref('core__fact_blocks') }}
ORDER BY month ASC
{% endset %}
{% set results = run_query(find_months_query) %}
{% if execute %}
@@ -45,7 +43,7 @@
),
existing_logs_to_exclude AS (
SELECT _log_id
FROM {{ ref('streamline__complete_decode_logs') }} l
FROM {{ ref('streamline__decoded_logs_complete') }} l
INNER JOIN target_blocks b using (block_number)
),
candidate_logs AS (
@@ -83,11 +81,9 @@
LIMIT {{ params.sql_limit }}
)
{% endset %}
{# Create the view #}
{% do run_query(create_view_query) %}
{{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }}
{% if var("STREAMLINE_INVOKE_STREAMS", false) %}
{# Check if rows exist first #}
{% set check_rows_query %}
@@ -98,21 +94,22 @@
{% set has_rows = results.columns[0].values()[0] %}
{% if has_rows %}
{# Invoke streamline since rows exist to decode #}
{# Invoke streamline, if rows exist to decode #}
{% set decode_query %}
SELECT streamline.udf_bulk_decode_logs(
object_construct(
'sql_source', '{{view_name}}',
'external_table', 'DECODED_LOGS',
'sql_limit', {{ params.sql_limit }},
'producer_batch_size', {{ params.producer_batch_size }},
'worker_batch_size', {{ params.worker_batch_size }})
SELECT
streamline.udf_bulk_decode_logs_v2(
PARSE_JSON(
$${ "external_table": "decoded_logs",
"producer_batch_size": {{ params.producer_batch_size }},
"sql_limit": {{ params.sql_limit }},
"sql_source": "{{view_name}}",
"worker_batch_size": {{ params.worker_batch_size }} }$$
)
);
{% endset %}
{% do run_query(decode_query) %}
{{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }}
{# Call wait since we actually did some decoding #}
{% do run_query("call system$wait(" ~ wait_time ~ ")") %}
{{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }}

View File

@@ -8,9 +8,7 @@
where _inserted_timestamp::date = sysdate()::date
and dayname(sysdate()) <> 'Sat'
{% endset %}
{% set results = run_query(check_for_new_user_abis_query) %}
{% if execute %}
{% set new_user_abis = results.columns[0].values()[0] %}

View File

@@ -0,0 +1,101 @@
{% macro streamline_external_table_query_decoder(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP())
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_decoder_fr(
source_name,
source_version
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
metadata,
b.file_name,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
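Both variants derive their partition columns straight from the S3 object path. Assuming a hypothetical layout like decoded_logs/records/2024/03/12/18000000_batch0.json, the expressions above resolve as:

-- SPLIT_PART(file_name, '/', 3..5) => '2024', '03', '12' => _partition_by_created_date = 2024-03-12
-- SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) => '18000000' => _partition_by_block_number = 18000000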

View File

@@ -0,0 +1,141 @@
{% macro streamline_external_table_query(
source_name,
source_version,
partition_function,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_fr(
source_name,
source_version,
partition_function,
partition_join_key,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.{{ partition_join_key }}
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.{{ partition_join_key }}
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}

View File

@@ -0,0 +1,36 @@
{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
{% if model_type != '' %}
{% set model_type = '_' ~ model_type %}
{% endif %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }}
{{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }}
{% if uses_receipts_by_hash %}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{{ log("", info=True) }}
{{ log("=== Source Details ===", info=True) }}
{{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }}
{{ log("", info=True) }}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@@ -0,0 +1,29 @@
{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %}
{%- if flags.WHICH == 'compile' and execute -%}
{% if uses_receipts_by_hash %}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %}
{% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %}
{% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %}
{% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@@ -0,0 +1,36 @@
{% macro log_model_details(vars=false, params=false) %}
{%- if execute -%}
/*
DBT Model Config:
{{ model.config | tojson(indent=2) }}
*/
{% if vars is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( vars | tojson(indent=2), info=True) }}
{% endif %}
/*
Variables:
{{ vars | tojson(indent=2) }}
*/
{% endif %}
{% if params is not false %}
{% if var('LOG_MODEL_DETAILS', false) %}
{{ log( params | tojson(indent=2), info=True) }}
{% endif %}
/*
Parameters:
{{ params | tojson(indent=2) }}
*/
{% endif %}
/*
Raw Code:
{{ model.raw_code }}
*/
{%- endif -%}
{% endmacro %}
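This macro always embeds the model config, variables, and raw code as comments in the compiled SQL; the log() output additionally reaches stdout only when LOG_MODEL_DETAILS is set. A sketch of turning that on for the bronze models tagged below (selector illustrative):

dbt compile --vars '{"LOG_MODEL_DETAILS": True}' -m "blast_models,tag:bronze_core"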

View File

@@ -0,0 +1,55 @@
{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("START_UP_BLOCK: " ~ min_block, info=True) }}
{{ log("", info=True) }}
{{ log("=== API Details ===", info=True) }}
{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }}
{{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }}
{{ log("", info=True) }}
{{ log("=== RPC Details ===", info=True) }}
{{ log(model_name ~ ": {", info=True) }}
{{ log(" method: '" ~ method ~ "',", info=True) }}
{{ log(" method_params: " ~ method_params, info=True) }}
{{ log("}", info=True) }}
{{ log("", info=True) }}
{% set params_str = streamline_params | tojson %}
{% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %}
{# Clean up the method_params formatting #}
{% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %}
{% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %}
{% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' ~ this.identifier ~ '",\n' %}
{% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %}
{% set config_log = config_log ~ ' ),\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@@ -0,0 +1,47 @@
{% macro set_default_variables_streamline(model_name, model_type) %}
{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}
{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%}
{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%}
{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%}
{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime'
else 'ORDER BY partition_key ASC, block_number ASC' -%}
{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'node_url': node_url,
'node_secret_path': node_secret_path,
'model_quantum_state': model_quantum_state,
'testing_limit': testing_limit,
'new_build': new_build,
'order_by_clause': order_by_clause,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
{% macro set_default_variables_bronze(source_name, model_type) %}
{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION',
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)")
-%}
{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%}
{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%}
{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'partition_function': partition_function,
'partition_join_key': partition_join_key,
'block_number': block_number,
'balances': balances,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
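The variable names these helpers resolve are plain concatenations, which is where the optional names in the dbt_project.yml change above come from. A worked example under the definitions above (the file path is hypothetical):

{# set_default_variables_streamline('blocks_transactions', 'realtime') looks up, e.g.: #}
{#   ('blocks_transactions' ~ '_' ~ 'realtime' ~ '_testing_limit').upper() => var('BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT', none) #}
{# and the bronze default partition_function reads the 4th path segment, so for #}
{#   file_name = 'blocks/2024/03/18000000_0.json' it yields CAST('18000000' AS INTEGER) #}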

View File

@@ -0,0 +1,57 @@
{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %}
{%- set rpc_config_details = {
"blocks_transactions": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)',
"exploded_key": ['result', 'result.transactions']
},
"receipts_by_hash": {
"method": 'eth_getTransactionReceipt',
"method_params": 'ARRAY_CONSTRUCT(tx_hash)'
},
"receipts": {
"method": 'eth_getBlockReceipts',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))',
"exploded_key": ['result'],
"lambdas": 2
},
"traces": {
"method": 'debug_traceBlockByNumber',
"method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
"exploded_key": ['result'],
"lambdas": 2
},
"confirm_blocks": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)'
}
} -%}
{%- set rpc_config = rpc_config_details[model_name.lower()] -%}
{%- set params = {
"external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()),
"sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"worker_batch_size": var(
(model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(),
(2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1))
),
"sql_source": (model_name ~ '_' ~ model_type).lower(),
"method": rpc_config['method'],
"method_params": rpc_config['method_params']
} -%}
{%- if rpc_config.get('exploded_key') is not none -%}
{%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%}
{%- endif -%}
{%- if rpc_config.get('lambdas') is not none -%}
{%- do params.update({"lambdas": rpc_config['lambdas']}) -%}
{%- endif -%}
{{ return(params) }}
{% endmacro %}
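With GLOBAL_BLOCKS_PER_HOUR set to 1800 in the dbt_project.yml change above and multiplier = 1, the default batch parameters work out as:

{# sql_limit = producer_batch_size = 2 * 1800 = 3600 #}
{# worker_batch_size = 3600 // lambdas: 1800 for receipts and traces (lambdas = 2), 3600 for the rest #}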

View File

@@ -1,186 +0,0 @@
{% macro streamline_external_table_query(
model,
partition_function,
partition_name,
unique_key
) %}
WITH meta AS (
SELECT
last_modified AS _inserted_timestamp,
file_name,
{{ partition_function }} AS {{ partition_name }}
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", model) }}')
) A
)
SELECT
{{ unique_key }} AS block_number,
DATA,
_inserted_timestamp,
MD5(
CAST(
COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text
)
) AS id,
s.{{ partition_name }},
s.value AS VALUE
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.{{ partition_name }} = s.{{ partition_name }}
WHERE
b.{{ partition_name }} = s.{{ partition_name }}
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010',
'-32608'
)
)
{% endmacro %}
{% macro streamline_external_table_fr_query(
model,
partition_function,
partition_name,
unique_key
) %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS {{ partition_name }}
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", model) }}'
)
) A
)
SELECT
{{ unique_key }} AS block_number,
DATA,
_inserted_timestamp,
MD5(
CAST(
COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text
)
) AS id,
s.{{ partition_name }},
s.value AS VALUE
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.{{ partition_name }} = s.{{ partition_name }}
WHERE
b.{{ partition_name }} = s.{{ partition_name }}
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010',
'-32608'
)
)
{% endmacro %}
{% macro streamline_external_table_query_v2(
model,
partition_function
) %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", model) }}')
) A
)
SELECT
s.*,
b.file_name,
_inserted_timestamp
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
{% endmacro %}
{% macro streamline_external_table_fr_query_v2(
model,
partition_function
) %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", model) }}'
)
) A
)
SELECT
s.*,
b.file_name,
_inserted_timestamp
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
{% endmacro %}

View File

@@ -50,7 +50,18 @@ row_nos AS (
),
batched AS ({% for item in range(501) %}
SELECT
rn.contract_address, live.udf_api('GET', CONCAT('https://api.blastscan.io/api?module=contract&action=getabi&address=', rn.contract_address, '&apikey={key}'),{ 'User-Agent': 'FlipsideStreamline' },{}, 'Vault/prod/block_explorers/blast_scan') AS abi_data, SYSDATE() AS _inserted_timestamp
rn.contract_address,
live.udf_api(
'GET',
CONCAT('https://api.blastscan.io/api?module=contract&action=getabi&address=',rn.contract_address,'&apikey={key}'),
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'livequery'
),
NULL,
'Vault/prod/block_explorers/blast_scan'
) AS abi_data,
SYSDATE() AS _inserted_timestamp
FROM
row_nos rn
WHERE

View File

@@ -2,7 +2,7 @@
materialized = "incremental",
incremental_strategy = 'delete+insert',
unique_key = "block_number",
incremental_predicates = [fsc_evm.standard_predicate()],
incremental_predicates = [standard_predicate()],
cluster_by = "block_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
tags = ['non_realtime','core'],

View File

@@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_blocks') }}
-- depends_on: {{ ref('bronze__blocks') }}
{{ config(
materialized = 'incremental',
unique_key = "block_number",
@@ -19,48 +19,48 @@ SELECT
) :id :: INT
) AS block_number,
utils.udf_hex_to_int(
DATA :result :baseFeePerGas :: STRING
DATA :baseFeePerGas :: STRING
) :: INT AS base_fee_per_gas,
utils.udf_hex_to_int(
DATA :result :difficulty :: STRING
DATA :difficulty :: STRING
) :: INT AS difficulty,
DATA :result :extraData :: STRING AS extra_data,
DATA :extraData :: STRING AS extra_data,
utils.udf_hex_to_int(
DATA :result :gasLimit :: STRING
DATA :gasLimit :: STRING
) :: INT AS gas_limit,
utils.udf_hex_to_int(
DATA :result :gasUsed :: STRING
DATA :gasUsed :: STRING
) :: INT AS gas_used,
DATA :result :hash :: STRING AS HASH,
DATA :result :logsBloom :: STRING AS logs_bloom,
DATA :result :miner :: STRING AS miner,
DATA :result :mixHash :: STRING AS mixHash,
DATA :hash :: STRING AS HASH,
DATA :logsBloom :: STRING AS logs_bloom,
DATA :miner :: STRING AS miner,
DATA :mixHash :: STRING AS mixHash,
utils.udf_hex_to_int(
DATA :result :nonce :: STRING
DATA :nonce :: STRING
) :: INT AS nonce,
utils.udf_hex_to_int(
DATA :result :number :: STRING
DATA :number :: STRING
) :: INT AS NUMBER,
DATA :result :parentHash :: STRING AS parent_hash,
DATA :result :receiptsRoot :: STRING AS receipts_root,
DATA :result :sha3Uncles :: STRING AS sha3_uncles,
DATA :parentHash :: STRING AS parent_hash,
DATA :receiptsRoot :: STRING AS receipts_root,
DATA :sha3Uncles :: STRING AS sha3_uncles,
utils.udf_hex_to_int(
DATA :result :size :: STRING
DATA :size :: STRING
) :: INT AS SIZE,
DATA :result :stateRoot :: STRING AS state_root,
DATA :stateRoot :: STRING AS state_root,
utils.udf_hex_to_int(
DATA :result :timestamp :: STRING
DATA :timestamp :: STRING
) :: TIMESTAMP AS block_timestamp,
utils.udf_hex_to_int(
DATA :result :totalDifficulty :: STRING
DATA :totalDifficulty :: STRING
) :: INT AS total_difficulty,
ARRAY_SIZE(
DATA :result :transactions
DATA :transactions
) AS tx_count,
DATA :result :transactionsRoot :: STRING AS transactions_root,
DATA :result :uncles AS uncles,
DATA :result :withdrawals AS withdrawals,
DATA :result :withdrawalsRoot :: STRING AS withdrawals_root,
DATA :transactionsRoot :: STRING AS transactions_root,
DATA :uncles AS uncles,
DATA :withdrawals AS withdrawals,
DATA :withdrawalsRoot :: STRING AS withdrawals_root,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_number']
@@ -71,7 +71,7 @@ SELECT
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_blocks') }}
{{ ref('bronze__blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
@@ -80,7 +80,7 @@ WHERE
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_blocks') }}
{{ ref('bronze__blocks_fr') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number

View File

@@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_confirm_blocks') }}
-- depends_on: {{ ref('bronze__confirm_blocks') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
@@ -23,7 +23,7 @@ WITH base AS (
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_confirm_blocks') }}
{{ ref('bronze__confirm_blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
@@ -37,7 +37,7 @@ WHERE
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_confirm_blocks') }}
{{ ref('bronze__confirm_blocks_fr') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number

View File

@@ -42,7 +42,7 @@ WHERE
)
AND DATA NOT ILIKE '%Event topic is not present in given ABI%'
{% else %}
{{ ref('bronze__fr_decoded_logs') }}
{{ ref('bronze__decoded_logs_fr') }}
WHERE
DATA NOT ILIKE '%Event topic is not present in given ABI%'
{% endif %}

View File

@@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_receipts') }}
-- depends_on: {{ ref('bronze__receipts') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
@@ -24,7 +24,7 @@ WITH base AS (
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_receipts') }}
{{ ref('bronze__receipts') }}
WHERE
_inserted_timestamp >= (
SELECT
@@ -34,7 +34,7 @@ WHERE
)
AND IS_OBJECT(DATA)
{% else %}
{{ ref('bronze__streamline_fr_receipts') }}
{{ ref('bronze__receipts_fr') }}
WHERE
IS_OBJECT(DATA)
{% endif %}

View File

@@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_traces') }}
-- depends_on: {{ ref('bronze__traces') }}
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
@@ -8,12 +8,7 @@
tags = ['non_realtime'],
full_refresh = false
) }}
{# {{ fsc_evm.silver_traces_v1(
full_reload_start_block = 3000000,
full_reload_blocks = 1000000,
use_partition_key = TRUE
) }}
#}
WITH bronze_traces AS (
SELECT
@@ -25,7 +20,7 @@ WITH bronze_traces AS (
FROM
{% if is_incremental() and not full_reload_mode %}
{{ ref('bronze__streamline_traces') }}
{{ ref('bronze__traces') }}
WHERE
_inserted_timestamp >= (
SELECT
@@ -34,7 +29,7 @@ WHERE
{{ this }}
)
AND DATA :result IS NOT NULL {% elif is_incremental() and full_reload_mode %}
{{ ref('bronze__streamline_fr_traces') }}
{{ ref('bronze__traces_fr') }}
WHERE
partition_key BETWEEN (
SELECT
@@ -49,7 +44,7 @@ WHERE
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_traces') }}
{{ ref('bronze__traces_fr') }}
WHERE
partition_key <= 3000000
{% endif %}

View File

@@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_transactions') }}
-- depends_on: {{ ref('bronze__transactions') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
@@ -24,7 +24,7 @@ WITH base AS (
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_transactions') }}
{{ ref('bronze__transactions') }}
WHERE
_inserted_timestamp >= (
SELECT
@@ -34,7 +34,7 @@ WHERE
)
AND IS_OBJECT(DATA)
{% else %}
{{ ref('bronze__streamline_fr_transactions') }}
{{ ref('bronze__transactions_fr') }}
WHERE
IS_OBJECT(DATA)
{% endif %}

View File

@@ -12,6 +12,8 @@ sources:
- name: traces
- name: decoded_logs
- name: confirm_blocks
- name: blocks_v2
- name: transactions_v2
- name: udfs_streamline
database: udfs
schema: streamline

View File

@@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'BLOCKS' %}
{% set source_version = 'V2' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,26 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
SELECT
partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__blocks_fr_v2') }}
UNION ALL
SELECT
partition_key,
block_number,
VALUE,
DATA :result AS DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__blocks_fr_v1') }}

View File

@@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'BLOCKS' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,34 @@
{# Set variables #}
{% set source_name = 'BLOCKS' %}
{% set source_version = 'V2'%}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_model_details(
vars = default_vars
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'CONFIRM_BLOCKS' %}
{% set source_version = '' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,15 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
SELECT
partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__confirm_blocks_fr_v2') }}

View File

@@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'CONFIRM_BLOCKS' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'RECEIPTS' %}
{% set source_version = '' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_receipts']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,16 @@
{{ config (
materialized = 'view',
tags = ['bronze_receipts']
) }}
SELECT
partition_key,
block_number,
array_index,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__receipts_fr_v2') }}

View File

@@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'RECEIPTS' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_receipts']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_query_v2(
model = "blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_query_v2(
model = "confirm_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_query_v2(
model = "receipts",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -1,7 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ fsc_evm.streamline_external_table_query(
model = "traces",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_query_v2(
model = "transactions",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'TRACES' %}
{% set source_version = '' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,16 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
SELECT
partition_key,
block_number,
array_index,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__traces_fr_v2') }}

View File

@@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'TRACES' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'TRANSACTIONS' %}
{% set source_version = 'V2' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,26 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
SELECT
partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__transactions_fr_v2') }}
UNION ALL
SELECT
partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__transactions_fr_v1') }}

View File

@@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'TRANSACTIONS' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -0,0 +1,34 @@
{# Set variables #}
{% set source_name = 'TRANSACTIONS' %}
{% set source_version = 'V2'%}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_model_details(
vars = default_vars
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_fr_query_v2(
model = "blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_fr_query_v2(
model = "confirm_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_fr_query_v2(
model = "receipts",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -1,7 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ fsc_evm.streamline_external_table_fr_query(
model = "traces",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -1,8 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_fr_query_v2(
model = 'transactions',
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )"
) }}

View File

@@ -1,41 +1,23 @@
{# Set variables #}
{% set source_name = 'DECODED_LOGS' %}
{% set source_version = '' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{# Log configuration details #}
{{ log_model_details(
vars = default_vars
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view'
materialized = 'view',
tags = ['bronze_decoded_logs']
) }}
WITH meta AS (
SELECT
last_modified AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", "decoded_logs") }}')
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
"decoded_logs"
{# Main query starts here #}
{{ streamline_external_table_query_decoder(
source_name = source_name.lower(),
source_version = source_version.lower()
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP())

View File

@@ -0,0 +1,13 @@
{# Log configuration details #}
{{ log_model_details() }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_decoded_logs']
) }}
SELECT
*
FROM
{{ ref('bronze__decoded_logs_fr_v2') }}

View File

@@ -0,0 +1,23 @@
{# Set variables #}
{% set source_name = 'DECODED_LOGS' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{# Log configuration details #}
{{ log_model_details(
vars = default_vars
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_decoded_logs']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_decoder_fr(
source_name = source_name.lower(),
source_version = source_version.lower()
) }}

View File

@@ -1,40 +0,0 @@
{{ config (
materialized = 'view'
) }}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
TO_DATE(
concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
) AS _partition_by_created_date
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "decoded_logs") }}'
)
) A
)
SELECT
block_number,
id :: STRING AS id,
DATA,
_inserted_timestamp,
s._partition_by_block_number AS _partition_by_block_number,
s._partition_by_created_date AS _partition_by_created_date
FROM
{{ source(
"bronze_streamline",
"decoded_logs"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date
WHERE
b._partition_by_block_number = s._partition_by_block_number
AND b._partition_by_created_date = s._partition_by_created_date

View File

@@ -3,7 +3,7 @@
) }}
SELECT
COALESCE(MIN(block_number), 0) AS block_number
MIN(block_number) AS block_number
FROM
{{ ref("silver__blocks") }}
WHERE

View File

@@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'BLOCKS' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@@ -1,42 +0,0 @@
-- depends_on: {{ ref('bronze__streamline_blocks') }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_core_complete']
) }}
SELECT
COALESCE(
VALUE :BLOCK_NUMBER :: INT,
metadata :request :"data" :id :: INT,
PARSE_JSON(
metadata :request :"data"
) :id :: INT
) AS block_number,
{{ dbt_utils.generate_surrogate_key(
['block_number']
) }} AS complete_blocks_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_blocks') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@@ -1,40 +0,0 @@
-- depends_on: {{ ref('bronze__streamline_confirm_blocks') }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
tags = ['streamline_core_complete']
) }}
SELECT
COALESCE(
VALUE :BLOCK_NUMBER :: INT,
metadata :request :"data" :id :: INT,
PARSE_JSON(
metadata :request :"data"
) :id :: INT
) AS block_number,
{{ dbt_utils.generate_surrogate_key(
['block_number']
) }} AS complete_confirmed_blocks_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_confirm_blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01' :: TIMESTAMP) _inserted_timestamp
FROM
{{ this }})
{% else %}
{{ ref('bronze__streamline_fr_confirm_blocks') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,42 +0,0 @@
-- depends_on: {{ ref('bronze__streamline_receipts') }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_core_complete']
) }}
SELECT
COALESCE(
VALUE :BLOCK_NUMBER :: INT,
metadata :request :"data" :id :: INT,
PARSE_JSON(
metadata :request :"data"
) :id :: INT
) AS block_number,
{{ dbt_utils.generate_surrogate_key(
['block_number']
) }} AS complete_receipts_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_receipts') }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_receipts') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,42 +0,0 @@
-- depends_on: {{ ref('bronze__streamline_traces') }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_core_complete']
) }}
SELECT
COALESCE(
VALUE :BLOCK_NUMBER :: INT,
metadata :request :"data" :id :: INT,
PARSE_JSON(
metadata :request :"data"
) :id :: INT
) AS block_number,
{{ dbt_utils.generate_surrogate_key(
['block_number']
) }} AS complete_traces_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_traces') }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_traces') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,42 +0,0 @@
-- depends_on: {{ ref('bronze__streamline_transactions') }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)",
tags = ['streamline_core_complete']
) }}
SELECT
COALESCE(
VALUE :BLOCK_NUMBER :: INT,
metadata :request :"data" :id :: INT,
PARSE_JSON(
metadata :request :"data"
) :id :: INT
) AS block_number,
{{ dbt_utils.generate_surrogate_key(
['block_number']
) }} AS complete_transactions_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_transactions') }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_transactions') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'CONFIRM_BLOCKS' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete_confirm_blocks']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'RECEIPTS' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete_receipts']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'TRACES' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'TRANSACTIONS' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -1,74 +0,0 @@
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"blocks",
"sql_limit" :"100000",
"producer_batch_size" :"100000",
"worker_batch_size" :"50000",
"sql_source" :"{{this.identifier}}" }
),
tags = ['streamline_core_history']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_blocks") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
)
SELECT
block_number,
ROUND(
block_number,
-3
) AS partition_key,
{{ target.database }}.live.udf_api(
'POST',
'{service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'eth_getBlockByNumber',
'params',
ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)),
'vault/prod/blast/mainnet'
) AS request
FROM
blocks
ORDER BY
block_number ASC

View File

@ -0,0 +1,112 @@
{# Set variables #}
{%- set model_name = 'BLOCKS_TRANSACTIONS' -%}
{%- set model_type = 'HISTORY' -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_history']
) }}
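{# For orientation: streamline_params is the macro-built analogue of the literal params
   the pre-refactor models inlined below in this diff, i.e. an object shaped roughly like
   { "external_table": "blocks_transactions", "sql_limit": 100000,
     "producer_batch_size": 100000, "worker_batch_size": 50000,
     "sql_source": "blocks_transactions_history" }
   (values illustrative only; the real ones come from set_streamline_parameters). #}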
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__blocks_complete") }} b
INNER JOIN {{ ref("streamline__transactions_complete") }} t USING(block_number)
WHERE 1=1
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
),
ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
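As a concrete illustration, one row of the request column built above, assuming the streamline params resolve to eth_getBlockByNumber with full transaction objects (the actual method is supplied by set_streamline_parameters at runtime); block 123456 hex-encodes to 0x1e240:
SELECT OBJECT_CONSTRUCT(
    'id', 123456,
    'jsonrpc', '2.0',
    'method', 'eth_getBlockByNumber',          -- assumed; injected by the macro
    'params', ARRAY_CONSTRUCT('0x1e240', TRUE) -- TRUE requests full tx objects
) AS example_request;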

View File

@ -0,0 +1,133 @@
{# Set variables #}
{%- set model_name = 'CONFIRM_BLOCKS' -%}
{%- set model_type = 'HISTORY' -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_history_confirm_blocks']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Delay blocks: only confirm blocks older than the sixth-newest hourly max #}
look_back AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_hour") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 6
),
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
AND block_number <= (SELECT block_number FROM look_back)
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
EXCEPT
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
AND block_number IS NOT NULL
AND block_number <= (SELECT block_number FROM look_back)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
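The look_back CTE above is the delay mechanism: ranking the hourly max blocks in descending order and keeping rank 6 yields the max block from roughly five hours back, so confirmations only target blocks the chain has had time to settle. A standalone sketch with a hypothetical relation name:
SELECT block_number
FROM max_block_by_hour   -- hypothetical resolved name for _max_block_by_hour
QUALIFY ROW_NUMBER() OVER (ORDER BY block_number DESC) = 6;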

View File

@ -1,75 +1,113 @@
{# Set variables #}
{%- set model_name = 'RECEIPTS' -%}
{%- set model_type = 'HISTORY' -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"receipts",
"sql_limit" :"100000",
"producer_batch_size" :"100000",
"worker_batch_size" :"50000",
"sql_source" :"{{this.identifier}}",
"exploded_key": "[\"result\"]" }
params = streamline_params
),
tags = ['streamline_core_history']
tags = ['streamline_core_history_receipts']
) }}
WITH last_3_days AS (
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
block_number IS NOT NULL
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_receipts") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(
block_number,
-3
) AS partition_key,
{{ target.database }}.live.udf_api(
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{service}/{Authentication}',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'eth_getBlockReceipts',
'params',
ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))),
'vault/prod/blast/mainnet'
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
blocks
ORDER BY
block_number ASC
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}

View File

@ -1,76 +1,113 @@
{# Set variables #}
{%- set model_name = 'TRACES' -%}
{%- set model_type = 'HISTORY' -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"traces",
"sql_limit" :"100000",
"producer_batch_size" :"100000",
"worker_batch_size" :"50000",
"sql_source" :"{{this.identifier}}",
"exploded_key": "[\"result\"]" }
params = streamline_params
),
tags = ['streamline_core_history']
) }}
WITH last_3_days AS (
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
block_number IS NOT NULL
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_traces") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(
block_number,
-3
) AS partition_key,
{{ target.database }}.live.udf_api(
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{service}/{Authentication}',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'debug_traceBlockByNumber',
'params',
ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '30s'))
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'vault/prod/blast/mainnet'
'{{ node_secret_path }}'
) AS request
FROM
blocks
ORDER BY
block_number ASC
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
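For reference, the removed lines above show the trace call fully inlined; a sketch of one request body built that way, with a hypothetical block number (4096 hex-encodes to 0x1000):
SELECT OBJECT_CONSTRUCT(
    'id', 4096,
    'jsonrpc', '2.0',
    'method', 'debug_traceBlockByNumber',
    'params', ARRAY_CONSTRUCT(
        '0x1000',
        OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '30s')
    )
) AS example_request;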

View File

@ -1,75 +0,0 @@
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"transactions",
"sql_limit" :"100000",
"producer_batch_size" :"100000",
"worker_batch_size" :"50000",
"sql_source" :"{{this.identifier}}",
"exploded_key": "[\"result\", \"transactions\"]" }
),
tags = ['streamline_core_history']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_transactions") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
)
SELECT
block_number,
ROUND(
block_number,
-3
) AS partition_key,
{{ target.database }}.live.udf_api(
'POST',
'{service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'eth_getBlockByNumber',
'params',
ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)),
'vault/prod/blast/mainnet'
) AS request
FROM
blocks
ORDER BY
block_number ASC

View File

@ -1,82 +0,0 @@
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"blocks",
"sql_limit" :"100000",
"producer_batch_size" :"100000",
"worker_batch_size" :"50000",
"sql_source" :"{{this.identifier}}" }
),
tags = ['streamline_core_realtime']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
to_do AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND block_number IS NOT NULL
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_blocks") }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
)
SELECT
block_number,
ROUND(
block_number,
-3
) AS partition_key,
{{ target.database }}.live.udf_api(
'POST',
'{service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'eth_getBlockByNumber',
'params',
ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)),
'vault/prod/blast/mainnet'
) AS request
FROM
to_do
ORDER BY
partition_key ASC

View File

@ -0,0 +1,126 @@
{# Set variables #}
{%- set model_name = 'BLOCKS_TRANSACTIONS' -%}
{%- set model_type = 'REALTIME' -%}
{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method,
min_block=min_block
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_realtime']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
{% if min_block is not none %}
AND block_number >= {{ min_block }}
{% endif %}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__blocks_complete") }} b
INNER JOIN {{ ref("streamline__transactions_complete") }} t USING(block_number)
WHERE 1=1
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
),
ready_blocks AS (
SELECT block_number
FROM to_do
{% if not new_build %}
UNION
SELECT block_number
FROM {{ ref("_unconfirmed_blocks") }}
UNION
SELECT block_number
FROM {{ ref("_missing_txs") }}
{% endif %}
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}

View File

@ -1,24 +1,63 @@
{# Set variables #}
{%- set model_name = 'CONFIRM_BLOCKS' -%}
{%- set model_type = 'REALTIME' -%}
{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method,
min_block=min_block
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"confirm_blocks",
"sql_limit" :"100000",
"producer_batch_size" :"100000",
"worker_batch_size" :"50000",
"sql_source" :"{{this.identifier}}" }
params = streamline_params
),
tags = ['streamline_core_realtime']
tags = ['streamline_core_realtime_confirm_blocks']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Delay blocks: only confirm blocks older than the sixth-newest hourly max #}
look_back AS (
SELECT
block_number
@ -29,75 +68,71 @@ look_back AS (
block_number DESC
) = 6
),
tbl AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
AND block_number <= (
SELECT
block_number
FROM
look_back
)
AND block_number >= (
SELECT
block_number
FROM
last_3_days
)
AND block_number <= (SELECT block_number FROM look_back)
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
{% if min_block is not none %}
AND block_number >= {{ min_block }}
{% endif %}
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_confirmed_blocks") }}
WHERE
block_number IS NOT NULL
AND block_number <= (
SELECT
block_number
FROM
look_back
)
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
AND block_number IS NOT NULL
AND block_number <= (SELECT block_number FROM look_back)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
AND block_number >= (
SELECT
block_number
FROM
last_3_days
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(
block_number,
-3
) AS partition_key,
{{ target.database }}.live.udf_api(
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{service}/{Authentication}',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'eth_getBlockByNumber',
'params',
ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)),
'vault/prod/blast/mainnet'
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
tbl
ORDER BY
block_number ASC
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}

View File

@ -1,109 +1,130 @@
{# Set variables #}
{%- set model_name = 'RECEIPTS' -%}
{%- set model_type = 'REALTIME' -%}
{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method,
min_block=min_block
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"receipts",
"sql_limit" :"100000",
"producer_batch_size" :"100000",
"worker_batch_size" :"50000",
"sql_source" :"{{this.identifier}}",
"exploded_key": "[\"result\"]" }
params = streamline_params
),
tags = ['streamline_core_realtime']
tags = ['streamline_core_realtime_receipts']
) }}
WITH last_3_days AS (
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
{# Identify blocks that need processing #}
to_do AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND block_number IS NOT NULL
block_number IS NOT NULL
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
{% if min_block is not none %}
AND block_number >= {{ min_block }}
{% endif %}
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_receipts") }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
),
ready_blocks AS (
SELECT
block_number
FROM
to_do
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if not new_build %}
UNION
SELECT
block_number
FROM
(
SELECT
block_number
FROM
{{ ref("_missing_receipts") }}
SELECT block_number
FROM {{ ref("_unconfirmed_blocks") }}
UNION
SELECT
block_number
FROM
{{ ref("_missing_txs") }}
SELECT block_number
FROM {{ ref("_missing_txs") }}
UNION
SELECT
block_number
FROM
{{ ref("_unconfirmed_blocks") }}
)
SELECT block_number
FROM {{ ref("_missing_receipts") }}
{% endif %}
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(
block_number,
-3
) AS partition_key,
{{ target.database }}.live.udf_api(
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{service}/{Authentication}',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'eth_getBlockReceipts',
'params',
ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))),
'vault/prod/blast/mainnet'
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
ORDER BY
block_number ASC
{{ order_by_clause }}
LIMIT {{ sql_limit }}

View File

@ -1,105 +1,127 @@
{# Set variables #}
{%- set model_name = 'TRACES' -%}
{%- set model_type = 'REALTIME' -%}
{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method,
min_block=min_block
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"traces",
"sql_limit" :"100000",
"producer_batch_size" :"100000",
"worker_batch_size" :"50000",
"sql_source" :"{{this.identifier}}",
"exploded_key": "[\"result\"]" }
params = streamline_params
),
tags = ['streamline_core_realtime']
) }}
WITH last_3_days AS (
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
{# Identify blocks that need processing #}
to_do AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND block_number IS NOT NULL
block_number IS NOT NULL
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
{% if min_block is not none %}
AND block_number >= {{ min_block }}
{% endif %}
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_traces") }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
),
ready_blocks AS (
SELECT
block_number
FROM
to_do
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if not new_build %}
UNION
SELECT
block_number
FROM
(
SELECT
block_number
FROM
{{ ref("_missing_traces") }}
SELECT block_number
FROM {{ ref("_unconfirmed_blocks") }}
UNION
SELECT
block_number
FROM
{{ ref("_unconfirmed_blocks") }}
)
SELECT block_number
FROM {{ ref("_missing_traces") }}
{% endif %}
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(
block_number,
-3
) AS partition_key,
{{ target.database }}.live.udf_api(
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{service}/{Authentication}',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'debug_traceBlockByNumber',
'params',
ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '30s'))
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'vault/prod/blast/mainnet'
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
ORDER BY
block_number ASC
{{ order_by_clause }}
LIMIT {{ sql_limit }}

View File

@ -1,104 +0,0 @@
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"transactions",
"sql_limit" :"100000",
"producer_batch_size" :"100000",
"worker_batch_size" :"50000",
"sql_source" :"{{this.identifier}}",
"exploded_key": "[\"result\", \"transactions\"]" }
),
tags = ['streamline_core_realtime']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
to_do AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND block_number IS NOT NULL
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_transactions") }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
),
ready_blocks AS (
SELECT
block_number
FROM
to_do
UNION
SELECT
block_number
FROM
(
SELECT
block_number
FROM
{{ ref("_missing_txs") }}
UNION
SELECT
block_number
FROM
{{ ref("_unconfirmed_blocks") }}
)
)
SELECT
block_number,
ROUND(
block_number,
-3
) AS partition_key,
{{ target.database }}.live.udf_api(
'POST',
'{service}/{Authentication}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json'
),
OBJECT_CONSTRUCT(
'id',
block_number,
'jsonrpc',
'2.0',
'method',
'eth_getBlockByNumber',
'params',
ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)),
'vault/prod/blast/mainnet'
) AS request
FROM
ready_blocks
ORDER BY
block_number ASC

View File

@ -2,30 +2,16 @@
materialized = "ephemeral"
) }}
WITH lookback AS (
SELECT
block_number
DISTINCT tx.block_number
FROM
{{ ref("_block_lookback") }}
)
SELECT
DISTINCT tx.block_number block_number
FROM
{{ ref("silver__transactions") }}
{{ ref("test_silver__transactions_recent") }}
tx
LEFT JOIN {{ ref("core__fact_traces") }}
tr
ON tx.block_number = tr.block_number
AND tx.tx_hash = tr.tx_hash
WHERE
tx.block_timestamp >= DATEADD('hour', -84, SYSDATE())
AND tr.tx_hash IS NULL
AND tx.block_number >= (
SELECT
block_number
FROM
lookback
LEFT JOIN {{ ref("test_gold__fact_traces_recent") }}
tr USING (
block_number,
tx_hash
)
AND tr.block_timestamp >= DATEADD('hour', -84, SYSDATE())
AND tr.block_timestamp IS NOT NULL
WHERE
tr.tx_hash IS NULL
AND tx.block_timestamp > DATEADD('day', -5, SYSDATE())
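The repair pattern above, reduced to its essentials with hypothetical table names: any recent transaction with no matching trace row is surfaced by block number so the realtime models can re-request it:
SELECT DISTINCT tx.block_number
FROM recent_transactions tx
LEFT JOIN recent_traces tr USING (block_number, tx_hash)
WHERE tr.tx_hash IS NULL;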

View File

@ -1,17 +1,29 @@
{%- if flags.WHICH == 'compile' and execute -%}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{{ config (
materialized = "view",
tags = ['streamline_core_complete']
) }}
SELECT
_id AS block_number,
REPLACE(
concat_ws('', '0x', to_char(block_number, 'XXXXXXXX')),
' ',
''
) AS block_number_hex
_id,
(
({{ var('GLOBAL_BLOCKS_PER_HOUR',0) }} / 60) * {{ var('GLOBAL_CHAINHEAD_DELAY',3) }}
) :: INT AS block_number_delay, --minute-based block delay
(_id - block_number_delay) :: INT AS block_number,
utils.udf_int_to_hex(block_number) AS block_number_hex
FROM
{{ ref("silver__number_sequence") }}
{{ ref('silver__number_sequence') }}
WHERE
_id <= (
SELECT
@ -22,5 +34,3 @@ WHERE
FROM
{{ ref("streamline__get_chainhead") }}
)
ORDER BY
_id ASC
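To make the delay arithmetic concrete, a worked example with assumed settings: at 1,800 blocks per hour the chain produces 1800 / 60 = 30 blocks per minute, so the default 3-minute chainhead delay trails the head by 30 * 3 = 90 blocks, i.e. block_number = _id - 90:
SELECT ((1800 / 60) * 3) :: INT AS block_number_delay;   -- => 90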

View File

@ -1,21 +1,45 @@
{%- set model_quantum_state = var('CHAINHEAD_QUANTUM_STATE', 'livequery') -%}
{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("CHAINHEAD_QUANTUM_STATE: " ~ model_quantum_state, info=True) }}
{{ log("", info=True) }}
{{ log("=== API Details ===", info=True) }}
{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{{ config (
materialized = 'table',
tags = ['streamline_core_complete']
tags = ['streamline_core_complete','chainhead']
) }}
SELECT
{{ target.database }}.live.udf_api(
live.udf_api(
'POST',
'{service}/{Authentication}',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type',
'application/json',
'fsc-quantum-state',
'livequery'
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id',
1,
0,
'jsonrpc',
'2.0',
'method',
@ -23,7 +47,7 @@ SELECT
'params',
[]
),
'vault/prod/blast/mainnet'
'{{ var('GLOBAL_NODE_SECRET_PATH') }}'
) AS resp,
utils.udf_hex_to_int(
resp :data :result :: STRING
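The chainhead response is a hex quantity (the RPC method, presumably eth_blockNumber, sits outside this hunk), so the final step converts it to an integer; a worked example with a hypothetical value:
SELECT utils.udf_hex_to_int('0x10d4f') AS block_number;  -- 0x10d4f => 68943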

View File

@ -0,0 +1,9 @@
version: 2
models:
- name: streamline__get_chainhead
description: "Retrieves the current chainhead block number from the configured RPC node."
columns:
- name: BLOCK_NUMBER
tests:
- not_null
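A not_null test compiles to a query selecting the offending rows; the test passes only when it returns zero rows. A sketch against a hypothetical resolved relation:
SELECT block_number
FROM streamline.get_chainhead   -- hypothetical resolved relation
WHERE block_number IS NULL;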

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'DECODED_LOGS' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(_log_id)" %}
{# Log configuration details #}
{{ log_model_details() }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "_log_id",
cluster_by = "ROUND(block_number, -3)",
incremental_predicates = ["dynamic_range", "block_number"],
merge_update_columns = ["_log_id"],
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_decoded_logs_complete']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
id AS _log_id,
{{ dbt_utils.generate_surrogate_key(['id']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY id ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,110 @@
{%- set testing_limit = var('DECODED_LOGS_REALTIME_TESTING_LIMIT', none) -%}
{%- set streamline_params = {
"external_table": var("DECODED_LOGS_REALTIME_EXTERNAL_TABLE", "decoded_logs"),
"sql_limit": var("DECODED_LOGS_REALTIME_SQL_LIMIT", 10000000),
"producer_batch_size": var("DECODED_LOGS_REALTIME_PRODUCER_BATCH_SIZE", 400000),
"worker_batch_size": var("DECODED_LOGS_REALTIME_WORKER_BATCH_SIZE", 200000),
"sql_source": "decoded_logs_realtime"
} -%}
{# Log configuration details #}
{{ log_model_details(
params = streamline_params
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = [fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_decode_logs_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = {
"external_table": streamline_params['external_table'],
"sql_limit": streamline_params['sql_limit'],
"producer_batch_size": streamline_params['producer_batch_size'],
"worker_batch_size": streamline_params['worker_batch_size'],
"sql_source": streamline_params['sql_source']
}
),
fsc_utils.if_data_call_wait()],
tags = ['streamline_decoded_logs_realtime']
) }}
WITH target_blocks AS (
SELECT
block_number
FROM
{{ ref('core__fact_blocks') }}
WHERE
block_number >= (
SELECT
block_number
FROM
{{ ref('_24_hour_lookback') }}
)
),
existing_logs_to_exclude AS (
SELECT
_log_id
FROM
{{ ref('streamline__decoded_logs_complete') }}
l
INNER JOIN target_blocks b USING (block_number)
WHERE
l.inserted_timestamp :: DATE >= DATEADD('day', -2, SYSDATE())
),
candidate_logs AS (
SELECT
l.block_number,
l.tx_hash,
l.event_index,
l.contract_address,
l.topics,
l.data,
CONCAT(
l.tx_hash :: STRING,
'-',
l.event_index :: STRING
) AS _log_id
FROM
target_blocks b
INNER JOIN {{ ref('core__fact_event_logs') }}
l USING (block_number)
WHERE
l.tx_succeeded
AND l.inserted_timestamp :: DATE >= DATEADD('day', -2, SYSDATE())
)
SELECT
l.block_number,
l._log_id,
A.abi,
OBJECT_CONSTRUCT(
'topics',
l.topics,
'data',
l.data,
'address',
l.contract_address
) AS DATA
FROM
candidate_logs l
INNER JOIN {{ ref('silver__complete_event_abis') }} A
ON A.parent_contract_address = l.contract_address
AND A.event_signature = l.topics [0] :: STRING
AND l.block_number BETWEEN A.start_block
AND A.end_block
WHERE
NOT EXISTS (
SELECT
1
FROM
existing_logs_to_exclude e
WHERE
e._log_id = l._log_id
)
{% if testing_limit is not none %}
LIMIT
{{ testing_limit }}
{% endif %}
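The surrogate key built above relies on event_index being unique within a transaction, so tx_hash plus event_index identifies exactly one log; a worked example with hypothetical values:
SELECT CONCAT('0xdeadbeef' :: STRING, '-', 7 :: STRING) AS _log_id;  -- => '0xdeadbeef-7'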

View File

@ -1,32 +0,0 @@
-- depends_on: {{ ref('bronze__decoded_logs') }}
{{ config (
materialized = "incremental",
unique_key = "_log_id",
cluster_by = "ROUND(block_number, -3)",
incremental_predicates = ["dynamic_range", "block_number"],
merge_update_columns = ["_log_id"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(_log_id)",
tags = ['streamline_decoded_logs_complete']
) }}
SELECT
block_number,
id AS _log_id,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__decoded_logs') }}
WHERE
TO_TIMESTAMP_NTZ(_inserted_timestamp) >= (
SELECT
COALESCE(MAX(TO_TIMESTAMP_NTZ(_inserted_timestamp)), '1970-01-01 00:00:00') _inserted_timestamp
FROM
{{ this }})
{% else %}
{{ ref('bronze__fr_decoded_logs') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,84 +0,0 @@
{{ config (
materialized = "view",
post_hook = [if_data_call_function(
func = "{{this.schema}}.udf_bulk_decode_logs(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'DECODED_LOGS', 'sql_limit', {{var('sql_limit','2000000')}}, 'producer_batch_size', {{var('producer_batch_size','400000')}}, 'worker_batch_size', {{var('worker_batch_size','200000')}}))",
target = "{{this.schema}}.{{this.identifier}}"
),
"call system$wait(" ~ var("WAIT", 400) ~ ")" ],
tags = ['streamline_decoded_logs_realtime']
) }}
WITH target_blocks AS (
SELECT
block_number
FROM
{{ ref('core__fact_blocks') }}
WHERE
block_number >= (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
)
),
existing_logs_to_exclude AS (
SELECT
_log_id
FROM
{{ ref('streamline__complete_decode_logs') }}
l
INNER JOIN target_blocks b USING (block_number)
WHERE
l._inserted_timestamp :: DATE >= DATEADD('day', -5, SYSDATE())
),
candidate_logs AS (
SELECT
l.block_number,
l.tx_hash,
l.event_index,
l.contract_address,
l.topics,
l.data,
CONCAT(
l.tx_hash :: STRING,
'-',
l.event_index :: STRING
) AS _log_id
FROM
target_blocks b
INNER JOIN {{ ref('core__fact_event_logs') }}
l USING (block_number)
WHERE
l.tx_succeeded
AND l.inserted_timestamp :: DATE >= DATEADD('day', -5, SYSDATE())
)
SELECT
l.block_number,
l._log_id,
A.abi AS abi,
OBJECT_CONSTRUCT(
'topics',
l.topics,
'data',
l.data,
'address',
l.contract_address
) AS DATA
FROM
candidate_logs l
INNER JOIN {{ ref('silver__complete_event_abis') }} A
ON A.parent_contract_address = l.contract_address
AND A.event_signature = l.topics [0] :: STRING
AND l.block_number BETWEEN A.start_block
AND A.end_block
WHERE
NOT EXISTS (
SELECT
1
FROM
existing_logs_to_exclude e
WHERE
e._log_id = l._log_id
)
limit 7500000

View File

@ -1,18 +0,0 @@
packages:
- package: calogica/dbt_expectations
version: 0.8.2
- package: dbt-labs/dbt_external_tables
version: 0.8.2
- package: dbt-labs/dbt_utils
version: 1.0.0
- git: https://github.com/FlipsideCrypto/fsc-utils.git
revision: eb33ac727af26ebc8a8cc9711d4a6ebc3790a107
- package: get-select/dbt_snowflake_query_tags
version: 2.5.0
- git: https://github.com/FlipsideCrypto/fsc-evm.git
revision: ec6adae14ab4060ad4a553fb7f32d7e57693996d
- package: calogica/dbt_date
version: 0.7.2
- git: https://github.com/FlipsideCrypto/livequery-models.git
revision: b024188be4e9c6bc00ed77797ebdc92d351d620e
sha1_hash: 622a679ecf98e6ebf3c904241902ce5328c77e52

View File

@ -6,8 +6,6 @@ packages:
- package: dbt-labs/dbt_utils
version: 1.0.0
- git: https://github.com/FlipsideCrypto/fsc-utils.git
revision: v1.29.0
revision: v1.31.0
- package: get-select/dbt_snowflake_query_tags
version: [">=2.0.0", "<3.0.0"]
- git: https://github.com/FlipsideCrypto/fsc-evm.git
revision: v1.5.0