AN-5568/sl-2 (#235)

* initial core streamline model updates for sl2

* dbt project vars, core bronze views and fsc-evm temp macros

* revert

* spacing

* external function uri stg

* sources and references to new table names

* workflow cmds

* chainhead test

* vars clean up

* update api integration

* added prod integration, cleaned up vars, param for silver.traces2

* remove goerli models

* block delay

* var for delay

* remove order by
drethereum 2024-12-10 11:45:02 -07:00 committed by GitHub
parent 4814d90c54
commit ce3d34c527
96 changed files with 2756 additions and 2510 deletions

View File

@@ -43,4 +43,8 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "base_models,tag:streamline_core_complete" "base_models,tag:streamline_core_realtime"
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "base_models,tag:streamline_core_complete" "base_models,tag:streamline_core_realtime" "base_models,tag:streamline_core_complete_receipts" "base_models,tag:streamline_core_realtime_receipts" "base_models,tag:streamline_core_complete_confirm_blocks" "base_models,tag:streamline_core_realtime_confirm_blocks"
- name: Run Chainhead Tests
run: |
dbt test -m "base_models,tag:chainhead"
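
Note: dbt unions space-separated -m arguments and intersects the comma-separated criteria within each one (models under base_models that also carry the tag), so the expanded command above runs all six streamline tag groups in a single invocation.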

View File

@@ -29,7 +29,7 @@ on:
description: 'DBT Run Command'
required: true
options:
- dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "base_models,tag:streamline_core_complete" "base_models,tag:streamline_core_history"
- dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "base_models,tag:streamline_core_complete" "base_models,tag:streamline_core_history" "base_models,tag:streamline_core_complete_receipts" "base_models,tag:streamline_core_history_receipts" "base_models,tag:streamline_core_complete_confirm_blocks" "base_models,tag:streamline_core_history_confirm_blocks"
- dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "base_models,tag:streamline_decoded_logs_complete" "base_models,tag:streamline_decoded_logs_history"
env:

View File

@@ -74,4 +74,8 @@ vars:
API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
ROLES: |
["INTERNAL_DEV"]
["INTERNAL_DEV"]
#### STREAMLINE 2.0 BEGIN ####
API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}'
EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}'
ROLES: |
["INTERNAL_DEV"]
config:
# The keys correspond to dbt profiles and are case sensitive
dev:
API_INTEGRATION: AWS_BASE_API_STG_V2
EXTERNAL_FUNCTION_URI: p2bt501b4d.execute-api.us-east-1.amazonaws.com/stg/
ROLES:
- AWS_LAMBDA_BASE_API
- INTERNAL_DEV
prod:
API_INTEGRATION: AWS_BASE_API_PROD_V2
EXTERNAL_FUNCTION_URI: 6zxz2oxkwk.execute-api.us-east-1.amazonaws.com/prod/
ROLES:
- AWS_LAMBDA_BASE_API
- INTERNAL_DEV
- DBT_CLOUD_BASE
#### STREAMLINE 2.0 END ####
#### FSC_EVM BEGIN ####
# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables
### GLOBAL VARIABLES BEGIN ###
## REQUIRED
GLOBAL_PROD_DB_NAME: 'base'
GLOBAL_NODE_SECRET_PATH: 'Vault/prod/base/quicknode/base_mainnet'
GLOBAL_BLOCKS_PER_HOUR: 1800
GLOBAL_USES_STREAMLINE_V1: True
GLOBAL_USES_SINGLE_FLIGHT_METHOD: True
### GLOBAL VARIABLES END ###
### MAIN_PACKAGE VARIABLES BEGIN ###
### CORE ###
## REQUIRED
## OPTIONAL
# GOLD_FULL_REFRESH: True
# SILVER_FULL_REFRESH: True
### MAIN_PACKAGE VARIABLES END ###
#### FSC_EVM END ####
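
Note: the API_INTEGRATION and EXTERNAL_FUNCTION_URI lookups above key into the new config map by dbt target name and fall back to the dev block when the active target has no entry. A minimal long-form sketch of the same resolution (the cfg variable name is illustrative):

{% set cfg = var('config') %}
{% if cfg.get(target.name) %}
    {% set api_integration = cfg[target.name]['API_INTEGRATION'] %} {# e.g. AWS_BASE_API_PROD_V2 on the prod target #}
{% else %}
    {% set api_integration = cfg['dev']['API_INTEGRATION'] %} {# fallback: AWS_BASE_API_STG_V2 #}
{% endif %}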

View File

@@ -0,0 +1,226 @@
{% macro silver_traces_v1(
full_reload_start_block,
full_reload_blocks,
full_reload_mode = false,
TRACES_ARB_MODE = false,
TRACES_SEI_MODE = false,
TRACES_KAIA_MODE = false,
use_partition_key = false,
schema_name = 'bronze'
) %}
WITH bronze_traces AS (
SELECT
block_number,
{% if use_partition_key %}
partition_key,
{% else %}
_partition_by_block_id AS partition_key,
{% endif %}
VALUE :array_index :: INT AS tx_position,
DATA :result AS full_traces,
{% if TRACES_SEI_MODE %}
DATA :txHash :: STRING AS tx_hash,
{% endif %}
_inserted_timestamp
FROM
{% if is_incremental() and not full_reload_mode %}
{{ ref(
schema_name ~ '__traces'
) }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
AND DATA :result IS NOT NULL {% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% elif is_incremental() and full_reload_mode %}
{{ ref(
schema_name ~ '__traces_fr'
) }}
WHERE
{% if use_partition_key %}
partition_key BETWEEN (
SELECT
MAX(partition_key) - 100000
FROM
{{ this }}
)
AND (
SELECT
MAX(partition_key) + {{ full_reload_blocks }}
FROM
{{ this }}
)
{% else %}
_partition_by_block_id BETWEEN (
SELECT
MAX(_partition_by_block_id) - 100000
FROM
{{ this }}
)
AND (
SELECT
MAX(_partition_by_block_id) + {{ full_reload_blocks }}
FROM
{{ this }}
)
{% endif %}
{% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% else %}
{{ ref(
schema_name ~ '__traces_fr'
) }}
WHERE
{% if use_partition_key %}
partition_key <= {{ full_reload_start_block }}
{% else %}
_partition_by_block_id <= {{ full_reload_start_block }}
{% endif %}
{% if TRACES_ARB_MODE %}
AND block_number > 22207817
{% endif %}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number, tx_position
ORDER BY
_inserted_timestamp DESC)) = 1
),
flatten_traces AS (
SELECT
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
partition_key,
IFF(
path IN (
'result',
'result.value',
'result.type',
'result.to',
'result.input',
'result.gasUsed',
'result.gas',
'result.from',
'result.output',
'result.error',
'result.revertReason',
'result.time',
'gasUsed',
'gas',
'type',
'to',
'from',
'value',
'input',
'error',
'output',
'time',
'revertReason'
{% if TRACES_ARB_MODE %},
'afterEVMTransfers',
'beforeEVMTransfers',
'result.afterEVMTransfers',
'result.beforeEVMTransfers'
{% endif %}
{% if TRACES_KAIA_MODE %},
'reverted',
'result.reverted'
{% endif %}
),
'ORIGIN',
REGEXP_REPLACE(REGEXP_REPLACE(path, '[^0-9]+', '_'), '^_|_$', '')
) AS trace_address,
_inserted_timestamp,
OBJECT_AGG(
key,
VALUE
) AS trace_json,
CASE
WHEN trace_address = 'ORIGIN' THEN NULL
WHEN POSITION(
'_' IN trace_address
) = 0 THEN 'ORIGIN'
ELSE REGEXP_REPLACE(
trace_address,
'_[0-9]+$',
'',
1,
1
)
END AS parent_trace_address,
SPLIT(
trace_address,
'_'
) AS trace_address_array
FROM
bronze_traces txs,
TABLE(
FLATTEN(
input => PARSE_JSON(
txs.full_traces
),
recursive => TRUE
)
) f
WHERE
f.index IS NULL
AND f.key != 'calls'
AND f.path != 'result'
{% if TRACES_ARB_MODE %}
AND f.path NOT LIKE 'afterEVMTransfers[%'
AND f.path NOT LIKE 'beforeEVMTransfers[%'
{% endif %}
{% if TRACES_KAIA_MODE %}
AND f.key NOT IN ('message', 'contract')
{% endif %}
GROUP BY
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
partition_key,
trace_address,
_inserted_timestamp
)
SELECT
block_number,
{% if TRACES_SEI_MODE %}
tx_hash,
{% else %}
tx_position,
{% endif %}
trace_address,
parent_trace_address,
trace_address_array,
trace_json,
partition_key,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['block_number'] +
(['tx_hash'] if TRACES_SEI_MODE else ['tx_position']) +
['trace_address']
) }} AS traces_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
flatten_traces qualify(ROW_NUMBER() over(PARTITION BY traces_id
ORDER BY
_inserted_timestamp DESC)) = 1
{% endmacro %}
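
Note: the nested REGEXP_REPLACE above collapses a recursive FLATTEN path into an underscore-delimited trace address, and parent_trace_address then strips the final segment. A minimal sketch with a hypothetical path value:

SELECT
    REGEXP_REPLACE(REGEXP_REPLACE('calls[0].calls[2].gasUsed', '[^0-9]+', '_'), '^_|_$', '') AS trace_address, -- returns '0_2'
    REGEXP_REPLACE('0_2', '_[0-9]+$', '', 1, 1) AS parent_trace_address; -- returns '0'; a single-segment address maps to 'ORIGIN' via the CASE instead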

View File

@@ -0,0 +1,141 @@
{% macro streamline_external_table_query(
source_name,
source_version,
partition_function,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}')
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.partition_key
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.partition_key
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
{% macro streamline_external_table_query_fr(
source_name,
source_version,
partition_function,
partition_join_key,
balances,
block_number,
uses_receipts_by_hash
) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS partition_key
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}'
)
) A
)
SELECT
s.*,
b.file_name,
b._inserted_timestamp
{% if balances %},
r.block_timestamp :: TIMESTAMP AS block_timestamp
{% endif %}
{% if block_number %},
COALESCE(
s.value :"BLOCK_NUMBER" :: STRING,
s.value :"block_number" :: STRING,
s.metadata :request :"data" :id :: STRING,
PARSE_JSON(
s.metadata :request :"data"
) :id :: STRING
) :: INT AS block_number
{% endif %}
{% if uses_receipts_by_hash %},
s.value :"TX_HASH" :: STRING AS tx_hash
{% endif %}
FROM
{{ source(
"bronze_streamline",
source_name ~ source_version
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.partition_key = s.{{ partition_join_key }}
{% if balances %}
JOIN {{ ref('_block_ranges') }}
r
ON r.block_number = COALESCE(
s.value :"BLOCK_NUMBER" :: INT,
s.value :"block_number" :: INT
)
{% endif %}
WHERE
b.partition_key = s.{{ partition_join_key }}
AND DATA :error IS NULL
AND DATA IS NOT NULL
{% endmacro %}
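
Note: both macros prune the external-table scan by equating the metadata-derived partition_key with the table's partition column. The default partition function (see set_default_variables_bronze below) parses the key out of the staged file path; a sketch with a hypothetical file name:

SELECT
    CAST(SPLIT_PART(SPLIT_PART('mainnet/blocks_v2/sl2/1234000_1.json', '/', 4), '_', 1) AS INTEGER) AS partition_key; -- returns 1234000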

View File

@@ -0,0 +1,36 @@
{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %}
{% if source_version != '' %}
{% set source_version = '_' ~ source_version.lower() %}
{% endif %}
{% if model_type != '' %}
{% set model_type = '_' ~ model_type %}
{% endif %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }}
{{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }}
{{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }}
{% if uses_receipts_by_hash %}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{{ log("", info=True) }}
{{ log("=== Source Details ===", info=True) }}
{{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }}
{{ log("", info=True) }}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@@ -0,0 +1,29 @@
{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %}
{%- if flags.WHICH == 'compile' and execute -%}
{% if uses_receipts_by_hash %}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }}
{% endif %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %}
{% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %}
{% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %}
{% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %}
{% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@@ -0,0 +1,55 @@
{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("START_UP_BLOCK: " ~ min_block, info=True) }}
{{ log("", info=True) }}
{{ log("=== API Details ===", info=True) }}
{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }}
{{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }}
{{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }}
{{ log("", info=True) }}
{{ log("=== RPC Details ===", info=True) }}
{{ log(model_name ~ ": {", info=True) }}
{{ log(" method: '" ~ method ~ "',", info=True) }}
{{ log(" method_params: " ~ method_params, info=True) }}
{{ log("}", info=True) }}
{{ log("", info=True) }}
{% set params_str = streamline_params | tojson %}
{% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %}
{# Clean up the method_params formatting #}
{% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %}
{% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %}
{% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' ~ this.identifier ~ '",\n' %}
{% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %}
{% set config_log = config_log ~ ' ),\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{% endmacro %}

View File

@@ -0,0 +1,47 @@
{% macro set_default_variables_streamline(model_name, model_type) %}
{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%}
{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%}
{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%}
{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%}
{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime'
else 'ORDER BY partition_key ASC, block_number ASC' -%}
{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'node_url': node_url,
'node_secret_path': node_secret_path,
'model_quantum_state': model_quantum_state,
'testing_limit': testing_limit,
'new_build': new_build,
'order_by_clause': order_by_clause,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
{% macro set_default_variables_bronze(source_name, model_type) %}
{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION',
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)")
-%}
{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%}
{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%}
{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%}
{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%}
{%- set variables = {
'partition_function': partition_function,
'partition_join_key': partition_join_key,
'block_number': block_number,
'balances': balances,
'uses_receipts_by_hash': uses_receipts_by_hash
} -%}
{{ return(variables) }}
{% endmacro %}
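
Note: each default can be overridden per model by declaring a project var named as the upper-cased concatenation of model name and type. A hypothetical override, assuming BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT: 10 were set in dbt_project.yml:

{% set streamline_vars = set_default_variables_streamline('blocks_transactions', 'realtime') %}
{# streamline_vars['testing_limit'] -> 10; without the var it falls back to none #}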

View File

@@ -0,0 +1,57 @@
{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %}
{%- set rpc_config_details = {
"blocks_transactions": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)',
"exploded_key": ['result', 'result.transactions']
},
"receipts_by_hash": {
"method": 'eth_getTransactionReceipt',
"method_params": 'ARRAY_CONSTRUCT(tx_hash)'
},
"receipts": {
"method": 'eth_getBlockReceipts',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))',
"exploded_key": ['result'],
"lambdas": 2
},
"traces": {
"method": 'debug_traceBlockByNumber',
"method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))",
"exploded_key": ['result'],
"lambdas": 2
},
"confirm_blocks": {
"method": 'eth_getBlockByNumber',
"method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)'
}
} -%}
{%- set rpc_config = rpc_config_details[model_name.lower()] -%}
{%- set params = {
"external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()),
"sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier),
"worker_batch_size": var(
(model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(),
(2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1))
),
"sql_source": (model_name ~ '_' ~ model_type).lower(),
"method": rpc_config['method'],
"method_params": rpc_config['method_params']
} -%}
{%- if rpc_config.get('exploded_key') is not none -%}
{%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%}
{%- endif -%}
{%- if rpc_config.get('lambdas') is not none -%}
{%- do params.update({"lambdas": rpc_config['lambdas']}) -%}
{%- endif -%}
{{ return(params) }}
{% endmacro %}
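
Note: with the GLOBAL_BLOCKS_PER_HOUR: 1800 value set earlier in this PR, the default batch sizing works out as follows (a worked example, assuming multiplier = 1):

{#
    sql_limit           = 2 * 1800 * 1 = 3600
    producer_batch_size = 2 * 1800 * 1 = 3600
    worker_batch_size   = 3600 // lambdas -> 1800 for "traces" and "receipts" (lambdas = 2), 3600 otherwise
#}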

View File

@@ -1,119 +0,0 @@
{% macro streamline_external_table_query(
model,
partition_function,
partition_name,
unique_key
) %}
WITH meta AS (
SELECT
last_modified AS _inserted_timestamp,
file_name,
{{ partition_function }} AS {{ partition_name }}
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", model) }}')
) A
)
SELECT
{{ unique_key }},
DATA,
_inserted_timestamp,
MD5(
CAST(
COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text
)
) AS id,
s.{{ partition_name }},
s.value AS VALUE,
file_name
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.{{ partition_name }} = s.{{ partition_name }}
WHERE
b.{{ partition_name }} = s.{{ partition_name }}
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010',
'-32608'
)
)
{% endmacro %}
{% macro streamline_external_table_fr_query(
model,
partition_function,
partition_name,
unique_key
) %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS {{ partition_name }}
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", model) }}'
)
) A
)
SELECT
{{ unique_key }},
DATA,
_inserted_timestamp,
MD5(
CAST(
COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text
)
) AS id,
s.{{ partition_name }},
s.value AS VALUE,
file_name
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.{{ partition_name }} = s.{{ partition_name }}
WHERE
b.{{ partition_name }} = s.{{ partition_name }}
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010',
'-32608'
)
)
{% endmacro %}

View File

@@ -43,7 +43,7 @@ missing_txs AS (
block_number,
tx_position
)
JOIN {{ ref("streamline__complete_debug_traceBlockByNumber") }} USING (block_number)
JOIN {{ ref("streamline__traces_complete") }} USING (block_number)
LEFT JOIN {{ source(
'base_silver',
'overflowed_traces2'

View File

@@ -1,27 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true }
) }}
SELECT
block_number,
block_timestamp,
network,
blockchain,
tx_count,
difficulty,
total_difficulty,
extra_data,
gas_limit,
gas_used,
HASH,
parent_hash,
receipts_root,
sha3_uncles,
SIZE,
uncle_blocks,
block_header_json
FROM
{{ ref('silver_goerli__blocks') }}

View File

@@ -1,4 +0,0 @@
version: 2
models:
- name: goerli__fact_blocks
description: Deprecating soon. Please migrate your queries to mainnet prior to the Sept. 13th deprecation date.

View File

@@ -1,24 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true }
) }}
SELECT
_log_id,
block_number,
block_hash,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
topics,
DATA,
event_removed,
tx_status,
tx_index,
TYPE
FROM {{ ref('silver_goerli__logs') }}

View File

@@ -1,4 +0,0 @@
version: 2
models:
- name: goerli__fact_event_logs
description: Deprecating soon. Please migrate your queries to mainnet prior to the Sept. 13th deprecation date.

View File

@@ -1,24 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true }
) }}
SELECT
tx_hash,
block_number,
block_timestamp,
from_address,
to_address,
eth_value,
gas,
gas_used,
input,
output,
TYPE,
identifier,
DATA,
tx_status,
sub_traces
FROM
{{ ref('silver_goerli__traces') }}

View File

@@ -1,4 +0,0 @@
version: 2
models:
- name: goerli__fact_traces
description: Deprecating soon. Please migrate your queries to mainnet prior to the Sept. 13th deprecation date.

View File

@@ -1,32 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true }
) }}
SELECT
block_number,
block_hash,
block_timestamp,
tx_hash,
nonce,
POSITION,
origin_function_signature,
from_address,
to_address,
eth_value,
gas_price,
gas_limit,
input_data,
tx_status AS status,
gas_used,
cumulative_gas_used,
l1_fee_scalar,
l1_gas_used,
l1_gas_price,
tx_fee,
tx_type,
is_system_tx,
tx_json
FROM
{{ ref('silver_goerli__transactions') }}

View File

@@ -1,4 +0,0 @@
version: 2
models:
- name: goerli__fact_transactions
description: Deprecating soon. Please migrate your queries to mainnet prior to the Sept. 13th deprecation date.

View File

@@ -1,104 +0,0 @@
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["block_number"],
tags = ['base_goerli']
) }}
WITH meta AS (
SELECT
registered_on,
last_modified,
file_name
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "goerli_blocks") }}'
)
) A
{% if is_incremental() %}
WHERE
LEAST(
registered_on,
last_modified
) >= (
SELECT
COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
FROM
{{ this }})
),
partitions AS (
SELECT
DISTINCT CAST(
SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER
) AS _partition_by_block_number
FROM
meta
)
{% else %}
)
{% endif %},
base AS (
SELECT
block_number,
DATA :result AS response,
registered_on AS _inserted_timestamp
FROM
{{ source(
"bronze_streamline",
"goerli_blocks"
) }}
t
JOIN meta b
ON b.file_name = metadata$filename -- add better partitioning
WHERE
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010'
) qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1
)
SELECT
block_number,
response :baseFeePerGas :: STRING AS baseFeePerGas,
response :difficulty :: STRING AS difficulty,
response :extraData :: STRING AS extraData,
response :gasLimit :: STRING AS gasLimit,
response :gasUsed :: STRING AS gasUsed,
response :hash :: STRING AS block_hash,
response :logsBloom :: STRING AS logsBloom,
response :miner :: STRING AS miner,
response :mixHash :: STRING AS mixHash,
response :nonce :: STRING AS nonce,
response :number :: STRING AS NUMBER,
response :parentHash :: STRING AS parentHash,
response :receiptsRoot :: STRING AS receiptsRoot,
response :sha3Uncles :: STRING AS sha3Uncles,
response :size :: STRING AS SIZE,
response :stateRoot :: STRING AS stateRoot,
response :timestamp :: STRING AS TIMESTAMP,
response :totalDifficulty :: STRING AS totalDifficulty,
response :transactions AS transactions,
ARRAY_SIZE(
response :transactions
) AS tx_count,
response :transactionsRoot :: STRING AS transactionsRoot,
response :uncles AS uncles,
response,
_inserted_timestamp
FROM
base

View File

@@ -1,24 +0,0 @@
version: 2
models:
- name: silver_goerli__blocks_method
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- BLOCK_NUMBER
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ

View File

@@ -1,98 +0,0 @@
{{ config (
materialized = "incremental",
unique_key = "tx_hash",
cluster_by = "ROUND(block_number, -3)",
tags = ['base_goerli']
) }}
WITH meta AS (
SELECT
registered_on,
last_modified,
file_name
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "goerli_eth_getTransactionReceipt") }}'
)
) A
{% if is_incremental() %}
WHERE
LEAST(
registered_on,
last_modified
) >= (
SELECT
COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
FROM
{{ this }})
),
partitions AS (
SELECT
DISTINCT CAST(
SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER
) AS _partition_by_block_number
FROM
meta
)
{% else %}
)
{% endif %},
base AS (
SELECT
block_number,
DATA :result AS response,
registered_on AS _inserted_timestamp
FROM
{{ source(
"bronze_streamline",
"goerli_eth_getTransactionReceipt"
) }}
t
JOIN meta b
ON b.file_name = metadata$filename -- add better partitioning
WHERE
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010'
)
OR response :: STRING IS NOT NULL
)
SELECT
block_number,
response :blockHash :: STRING AS blockHash,
response :transactionHash :: STRING AS tx_hash,
utils.udf_hex_to_int(
response :transactionIndex :: STRING) :: INTEGER AS tx_index,
response :cumulativeGasUsed :: STRING AS cumulativeGasUsed,
response :effectiveGasPrice :: STRING AS effectiveGasPrice,
response :gasUsed :: STRING AS gasUsed,
response :l1Fee :: STRING AS l1Fee,
response :l1FeeScalar :: STRING AS l1FeeScalar,
response :l1GasUsed :: STRING AS l1GasUsed,
response :l1GasPrice :: STRING AS l1GasPrice,
response :logs AS logs_array,
response :logsBloom :: STRING AS logsBloom,
response :status :: STRING AS status,
response :from :: STRING AS origin_from_address,
response :to :: STRING AS origin_to_address,
response :type :: STRING AS type,
response,
_inserted_timestamp
FROM
base
QUALIFY ROW_NUMBER() OVER (PARTITION BY tx_hash
ORDER BY _inserted_timestamp DESC) = 1

View File

@@ -1,21 +0,0 @@
version: 2
models:
- name: silver_goerli__receipts_method
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_HASH
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ

View File

@@ -1,96 +0,0 @@
{{ config (
materialized = "incremental",
unique_key = "ID",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["ID"],
tags = ['base_goerli']
) }}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
last_modified,
file_name
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "goerli_debug_traceTransaction") }}'
)
) A
{% if is_incremental() %}
WHERE
LEAST(
registered_on,
last_modified
) >= (
SELECT
COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
FROM
{{ this }})
),
partitions AS (
SELECT
DISTINCT CAST(
SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER
) AS _partition_by_block_number
FROM
meta
)
{% else %}
)
{% endif %}
SELECT
DATA :result AS response,
SPLIT(
DATA :id :: STRING,
'-'
) AS split_id,
split_id [0] :: INT AS block_number,
split_id [1] :: STRING AS tx_hash,
DATA :id :: STRING AS id,
_inserted_timestamp
FROM
{{ source(
"bronze_streamline",
"goerli_debug_traceTransaction"
) }}
t
JOIN meta b
ON b.file_name = metadata$filename
{% if is_incremental() %}
JOIN partitions p
ON p._partition_by_block_number = t._partition_by_block_id
{% endif %}
WHERE
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010'
)
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@@ -1,32 +0,0 @@
version: 2
models:
- name: silver_goerli__traces_method
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RESPONSE
tests:
- not_null

View File

@@ -1,119 +0,0 @@
{{ config (
materialized = "incremental",
unique_key = "tx_hash",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["tx_hash"],
tags = ['base_goerli']
) }}
WITH meta AS (
SELECT
registered_on,
last_modified,
file_name
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "goerli_transactions") }}'
)
) A
{% if is_incremental() %}
WHERE
LEAST(
registered_on,
last_modified
) >= (
SELECT
COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
FROM
{{ this }})
),
partitions AS (
SELECT
DISTINCT CAST(
SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER
) AS _partition_by_block_number
FROM
meta
)
{% else %}
)
{% endif %},
base AS (
SELECT
block_number,
DATA :result AS block_response,
DATA :result :transactions AS tx_response,
registered_on AS _inserted_timestamp
FROM
{{ source(
"bronze_streamline",
"goerli_transactions"
) }}
t
JOIN meta b
ON b.file_name = metadata$filename --needs better partitioning once Ryan fixes his version of the model
WHERE
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010'
) qualify(ROW_NUMBER() over (PARTITION BY block_number
ORDER BY
_inserted_timestamp DESC)) = 1
),
flat AS (
SELECT
block_number,
block_response :timestamp :: STRING AS block_timestamp,
VALUE :hash :: STRING AS tx_hash,
VALUE :blockHash :: STRING AS block_hash,
VALUE :blockNumber :: STRING AS blockNumber,
VALUE :chainId :: STRING AS chainId,
VALUE :from :: STRING AS from_address,
VALUE :gas :: STRING AS gas_limit,
VALUE :gasPrice :: STRING AS gas_price,
VALUE :input :: STRING AS input,
CASE
WHEN VALUE :isSystemTx :: STRING = 'true' THEN TRUE
ELSE FALSE
END AS is_system_tx,
VALUE :maxFeePerGas :: STRING AS max_fee_per_gas,
VALUE :mint :: STRING AS mint,
VALUE :maxPriorityFeePerGas :: STRING AS max_priority_fee_per_gas,
VALUE :nonce :: STRING AS nonce,
VALUE :r :: STRING AS r,
VALUE :s :: STRING AS s,
VALUE :sourceHash :: STRING AS sourceHash,
VALUE :to :: STRING AS to_address,
VALUE :transactionIndex :: STRING AS POSITION,
VALUE :type :: STRING AS tx_type,
VALUE :v :: STRING AS v,
VALUE :value :: STRING AS eth_value,
VALUE :accessList AS accessList,
VALUE,
block_response,
_inserted_timestamp
FROM
base,
LATERAL FLATTEN (
input => tx_response
)
)
SELECT
*
FROM
flat qualify(ROW_NUMBER() over (PARTITION BY tx_hash
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@@ -1,24 +0,0 @@
version: 2
models:
- name: silver_goerli__tx_method
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_HASH
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ

View File

@@ -1,70 +0,0 @@
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "block_timestamp::date",
merge_update_columns = ["block_number"],
tags = ['base_goerli']
) }}
SELECT
block_number,
TO_TIMESTAMP_NTZ(
utils.udf_hex_to_int(
TIMESTAMP :: STRING
)
) AS block_timestamp,
'goerli' AS network,
'base' AS blockchain,
tx_count,
utils.udf_hex_to_int(
difficulty :: STRING
) :: INTEGER AS difficulty,
utils.udf_hex_to_int(
totalDifficulty :: STRING
) :: INTEGER AS total_difficulty,
extraData AS extra_data,
utils.udf_hex_to_int(
gasLimit :: STRING
) :: INTEGER AS gas_limit,
utils.udf_hex_to_int(
gasUsed :: STRING
) :: INTEGER AS gas_used,
block_hash AS HASH,
parentHash AS parent_hash,
receiptsRoot AS receipts_root,
sha3Uncles AS sha3_uncles,
utils.udf_hex_to_int(
SIZE :: STRING
) :: INTEGER AS SIZE,
uncles AS uncle_blocks,
object_construct_keep_null(
'transactions',
transactions,
'transactions_root',
transactionsRoot,
'logs_bloom',
logsBloom,
'miner',
miner,
'mix_hash',
mixHash,
'nonce',
utils.udf_hex_to_int(
nonce :: STRING
) :: INTEGER
) AS block_header_json,
_inserted_timestamp
FROM
{{ ref('silver_goerli__blocks_method') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
)
FROM
{{ this }}
)
{% endif %}

View File

@@ -1,116 +0,0 @@
version: 2
models:
- name: silver_goerli__blocks
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- BLOCK_NUMBER
- fsc_utils.sequence_gaps:
column_name: BLOCK_NUMBER
where: BLOCK_TIMESTAMP < CURRENT_DATE - 1
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: NETWORK
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: BLOCKCHAIN
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_COUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: DIFFICULTY
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TOTAL_DIFFICULTY
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: EXTRA_DATA
tests:
- not_null
- name: GAS_LIMIT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: GAS_USED
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PARENT_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIPTS_ROOT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SHA3_UNCLES
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SIZE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: BLOCK_HEADER_JSON
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- OBJECT

View File

@@ -1,184 +0,0 @@
{{ config (
materialized = "incremental",
unique_key = "_log_id",
cluster_by = "ROUND(block_number, -3)",
tags = ['base_goerli']
) }}
WITH logs_response AS (
SELECT
block_number,
blockHash AS block_hash,
logs_array,
VALUE :address :: STRING AS contract_address,
VALUE :data :: STRING AS data,
utils.udf_hex_to_int(
VALUE :logIndex :: STRING) :: INTEGER AS event_index,
VALUE :removed :: STRING AS removed,
VALUE :topics AS topics,
VALUE :transactionHash :: STRING AS tx_hash,
VALUE :transactionIndex :: STRING AS transactionIndex,
origin_from_address,
origin_to_address,
status,
type,
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
_inserted_timestamp
FROM {{ ref('silver_goerli__receipts_method') }},
LATERAL FLATTEN(input => logs_array)
{% if is_incremental() %}
WHERE _inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
)
FROM
{{ this }}
)
{% endif %}
),
flat_base AS (
SELECT
_log_id,
block_number,
block_hash,
tx_hash,
origin_from_address,
CASE
WHEN len(origin_to_address) <= 0 THEN NULL
ELSE origin_to_address
END AS origin_to_address,
event_index,
contract_address,
topics,
DATA,
CASE
WHEN removed = 'true' THEN TRUE
ELSE FALSE
END AS event_removed,
CASE
WHEN status = '0x1' THEN 'SUCCESS'
ELSE 'FAIL'
END AS tx_status,
utils.udf_hex_to_int(
transactionIndex
) :: INTEGER AS tx_index,
utils.udf_hex_to_int(
TYPE
) :: INTEGER AS TYPE,
_inserted_timestamp
FROM
logs_response
),
new_records AS (
SELECT
f.block_number,
b.block_timestamp,
f.block_hash,
f.tx_hash,
f.origin_from_address,
f.origin_to_address,
f.event_index,
f.contract_address,
f.topics,
f.data,
f.event_removed,
f.tx_status,
f.tx_index,
f.type,
t.origin_function_signature,
CASE
WHEN t.origin_function_signature IS NULL
OR b.block_timestamp IS NULL THEN TRUE
ELSE FALSE
END AS is_pending,
f._log_id,
f._inserted_timestamp
FROM
flat_base f
LEFT OUTER JOIN {{ ref('silver_goerli__transactions') }}
t
ON f.tx_hash = t.tx_hash
AND f.block_number = t.block_number
LEFT OUTER JOIN {{ ref('silver_goerli__blocks') }}
b
ON f.block_number = b.block_number
)
{% if is_incremental() %},
missing_data AS (
SELECT
t.block_number,
b.block_timestamp,
t.block_hash,
t.tx_hash,
t.origin_from_address,
t.origin_to_address,
t.event_index,
t.contract_address,
t.topics,
t.data,
t.event_removed,
t.tx_status,
t.tx_index,
t.type,
txs.origin_function_signature,
FALSE AS is_pending,
t._log_id,
GREATEST(
t._inserted_timestamp,
b._inserted_timestamp,
txs._inserted_timestamp
) AS _inserted_timestamp
FROM
{{ this }}
t
INNER JOIN {{ ref('silver_goerli__transactions') }}
txs
ON t.tx_hash = txs.tx_hash
AND t.block_number = txs.block_number
INNER JOIN {{ ref('silver_goerli__blocks') }}
b
ON t.block_number = b.block_number
WHERE
t.is_pending
)
{% endif %}
SELECT
block_number,
block_timestamp,
block_hash,
tx_hash,
origin_from_address,
origin_to_address,
event_index,
contract_address,
topics,
DATA,
event_removed,
tx_status,
tx_index,
TYPE,
origin_function_signature,
is_pending,
_log_id,
_inserted_timestamp
FROM
new_records qualify(ROW_NUMBER() over (PARTITION BY block_number, _log_id
ORDER BY
_inserted_timestamp DESC)) = 1
{% if is_incremental() %}
UNION
SELECT
*
FROM
missing_data
{% endif %}

View File

@@ -1,50 +0,0 @@
version: 2
models:
- name: silver_goerli__logs
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
- fsc_utils.sequence_gaps:
partition_by:
- BLOCK_NUMBER
- TX_HASH
column_name: EVENT_INDEX
where: BLOCK_TIMESTAMP < CURRENT_DATE
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
where: ORIGIN_TO_ADDRESS IS NOT NULL
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ

View File

@@ -1,214 +0,0 @@
{{ config (
materialized = "incremental",
unique_key = "_call_id",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["_call_id"],
tags = ['base_goerli']
) }}
WITH new_txs AS (
SELECT
block_number,
tx_hash,
response,
_inserted_timestamp
FROM
{{ ref('silver_goerli__traces_method') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
FROM
{{ this }})
{% endif %}
),
base_table AS (
SELECT
CASE
WHEN POSITION(
'.',
path :: STRING
) > 0 THEN REPLACE(
REPLACE(
path :: STRING,
SUBSTR(path :: STRING, len(path :: STRING) - POSITION('.', REVERSE(path :: STRING)) + 1, POSITION('.', REVERSE(path :: STRING))),
''
),
'.',
'__'
)
ELSE '__'
END AS id,
OBJECT_AGG(
DISTINCT key,
VALUE
) AS DATA,
tx_hash,
block_number,
_inserted_timestamp
FROM
new_txs txs,
TABLE(
FLATTEN(
input => PARSE_JSON(
txs.response
),
recursive => TRUE
)
) f
WHERE
f.index IS NULL
AND f.key != 'calls'
GROUP BY
tx_hash,
id,
block_number,
_inserted_timestamp
),
flattened_traces AS (
SELECT
DATA :from :: STRING AS from_address,
utils.udf_hex_to_int(
DATA :gas :: STRING
) AS gas,
utils.udf_hex_to_int(
DATA :gasUsed :: STRING
) AS gas_used,
DATA :input :: STRING AS input,
DATA :output :: STRING AS output,
DATA :to :: STRING AS to_address,
DATA :type :: STRING AS TYPE,
CASE
WHEN DATA :type :: STRING = 'CALL' THEN utils.udf_hex_to_int(
DATA :value :: STRING
) / pow(
10,
18
)
ELSE 0
END AS eth_value,
CASE
WHEN id = '__' THEN CONCAT(
DATA :type :: STRING,
'_ORIGIN'
)
ELSE CONCAT(
DATA :type :: STRING,
'_',
REPLACE(
REPLACE(REPLACE(REPLACE(id, 'calls', ''), '[', ''), ']', ''),
'__',
'_'
)
)
END AS identifier,
concat_ws(
'-',
tx_hash,
identifier
) AS _call_id,
SPLIT(
identifier,
'_'
) AS id_split,
ARRAY_SLICE(id_split, 1, ARRAY_SIZE(id_split)) AS levels,
ARRAY_TO_STRING(
levels,
'_'
) AS LEVEL,
CASE
WHEN ARRAY_SIZE(levels) = 1
AND levels [0] :: STRING = 'ORIGIN' THEN NULL
WHEN ARRAY_SIZE(levels) = 1 THEN 'ORIGIN'
ELSE ARRAY_TO_STRING(ARRAY_SLICE(levels, 0, ARRAY_SIZE(levels) - 1), '_')
END AS parent_level,
COUNT(parent_level) over (
PARTITION BY tx_hash,
parent_level
) AS sub_traces,
*
FROM
base_table
),
group_sub_traces AS (
SELECT
tx_hash,
parent_level,
sub_traces
FROM
flattened_traces
GROUP BY
tx_hash,
parent_level,
sub_traces
),
FINAL AS (
SELECT
flattened_traces.tx_hash AS tx_hash,
flattened_traces.block_number AS block_number,
flattened_traces.from_address AS from_address,
flattened_traces.to_address AS to_address,
flattened_traces.eth_value AS eth_value,
COALESCE(
flattened_traces.gas,
0
) AS gas,
COALESCE(
flattened_traces.gas_used,
0
) AS gas_used,
flattened_traces.input AS input,
flattened_traces.output AS output,
flattened_traces.type AS TYPE,
flattened_traces.identifier AS identifier,
flattened_traces._call_id AS _call_id,
flattened_traces.data AS DATA,
flattened_traces._inserted_timestamp AS _inserted_timestamp,
group_sub_traces.sub_traces AS sub_traces
FROM
flattened_traces
LEFT OUTER JOIN group_sub_traces
ON flattened_traces.tx_hash = group_sub_traces.tx_hash
AND flattened_traces.level = group_sub_traces.parent_level
)
SELECT
f.tx_hash,
f.block_number,
t.block_timestamp,
f.from_address,
f.to_address,
f.eth_value,
f.gas,
f.gas_used,
f.input,
f.output,
f.type,
f.identifier,
f._call_id,
f.data,
t.tx_status,
f.sub_traces,
f._inserted_timestamp
FROM
FINAL f
JOIN {{ ref('silver_goerli__transactions') }}
t
ON f.tx_hash = t.tx_hash
WHERE
identifier IS NOT NULL
{% if is_incremental() %}
AND t._inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
qualify(ROW_NUMBER() over(PARTITION BY _call_id
ORDER BY
f._inserted_timestamp DESC)) = 1

View File

@@ -1,54 +0,0 @@
version: 2
models:
- name: silver_goerli__traces
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _CALL_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: FROM_ADDRESS
tests:
- not_null:
where: TYPE <> 'SELFDESTRUCT'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
where: TO_ADDRESS IS NOT NULL
- name: IDENTIFIER
tests:
- not_null
- name: ETH_VALUE
tests:
- not_null
- name: GAS
tests:
- not_null
- name: GAS_USED
tests:
- not_null

View File

@@ -1,271 +0,0 @@
{{ config (
materialized = "incremental",
unique_key = "tx_hash",
cluster_by = "BLOCK_TIMESTAMP::DATE",
tags = ['base_goerli']
) }}
WITH flat_base AS (
SELECT
t.block_number,
TO_TIMESTAMP_NTZ(
utils.udf_hex_to_int(
block_timestamp :: STRING
)
) AS block_timestamp,
t.tx_hash,
utils.udf_hex_to_int(
nonce :: STRING
) :: INTEGER AS nonce,
utils.udf_hex_to_int(
POSITION :: STRING
) :: INTEGER AS POSITION,
SUBSTR(
input,
1,
10
) AS origin_function_signature,
from_address,
to_address,
COALESCE(
utils.udf_hex_to_int(
eth_value :: STRING
) :: INTEGER / pow(
10,
18
),
0
) AS eth_value,
block_hash,
COALESCE(
utils.udf_hex_to_int(
gas_price :: STRING
) :: INTEGER,
0
) AS gas_price1,
utils.udf_hex_to_int(
gas_limit :: STRING
) :: INTEGER AS gas_limit,
input AS input_data,
utils.udf_hex_to_int(
tx_type :: STRING
) :: INTEGER AS tx_type,
is_system_tx,
object_construct_keep_null(
'chain_ID',
utils.udf_hex_to_int(
chainID :: STRING
) :: INTEGER,
'r',
r,
's',
s,
'v',
utils.udf_hex_to_int(
v :: STRING
) :: INTEGER,
'access_list',
accesslist,
'max_priority_fee_per_gas',
COALESCE(
utils.udf_hex_to_int(
max_priority_fee_per_gas :: STRING
) :: INTEGER,
0
),
'max_fee_per_gas',
COALESCE(
utils.udf_hex_to_int(
max_fee_per_gas :: STRING
) :: INTEGER,
0
),
'mint',
utils.udf_hex_to_int(
mint :: STRING
),
'source_hash',
sourcehash
) AS tx_json,
CASE
WHEN status = '0x1' THEN 'SUCCESS'
ELSE 'FAIL'
END AS tx_status,
COALESCE(utils.udf_hex_to_int(gasUsed :: STRING) :: INTEGER, 0) AS gas_used,
COALESCE(
utils.udf_hex_to_int(
cumulativeGasUsed :: STRING
) :: INTEGER,
0
) AS cumulative_gas_used,
COALESCE(
utils.udf_hex_to_int(
effectiveGasPrice
) :: INTEGER,
0
) AS effective_gas_price,
COALESCE((
l1FeeScalar :: STRING
) :: FLOAT,
0
) AS l1_fee_scalar,
COALESCE(
utils.udf_hex_to_int(
l1GasUsed :: STRING
) :: FLOAT,
0
) AS l1_gas_used,
COALESCE(
utils.udf_hex_to_int(
l1GasPrice :: STRING
) :: FLOAT,
0
) AS l1_gas_price,
COALESCE(
((gas_used * gas_price1) + (l1_gas_price * l1_gas_used * l1_fee_scalar)) / pow(
10,
18
),
0
) AS tx_fee,
t._INSERTED_TIMESTAMP
FROM
{{ ref('silver_goerli__tx_method') }}
t
JOIN {{ ref('silver_goerli__receipts_method') }}
l
ON t.tx_hash = l.tx_hash
{% if is_incremental() %}
WHERE
t._inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
) :: DATE
FROM
{{ this }}
)
{% endif %}
),
new_records AS (
SELECT
f.block_number,
b.block_timestamp,
f.tx_hash,
f.nonce,
f.POSITION,
f.from_address,
f.to_address,
f.eth_value,
f.block_hash,
f.gas_price1 / pow(
10,
9
) AS gas_price,
f.gas_limit,
f.input_data,
f.tx_type,
f.is_system_tx,
f.tx_json,
f.tx_status,
f.gas_used,
f.cumulative_gas_used,
f.effective_gas_price,
f.l1_fee_scalar,
f.l1_gas_used,
f.l1_gas_price / pow(
10,
9
) AS l1_gas_price,
f.tx_fee,
f.origin_function_signature,
CASE
WHEN b.block_timestamp IS NULL THEN TRUE
ELSE FALSE
END AS is_pending,
f._inserted_timestamp
FROM
flat_base f
LEFT OUTER JOIN {{ ref('silver_goerli__blocks') }} b
ON f.block_number = b.block_number
)
{% if is_incremental() %},
missing_data AS (
SELECT
t.block_number,
b.block_timestamp,
t.tx_hash,
t.nonce,
t.POSITION,
t.from_address,
t.to_address,
t.eth_value,
block_hash,
t.gas_price,
t.gas_limit,
t.input_data,
t.tx_type,
t.is_system_tx,
t.tx_json,
t.tx_status,
t.gas_used,
t.cumulative_gas_used,
t.effective_gas_price,
t.l1_fee_scalar,
t.l1_gas_used,
t.l1_gas_price,
t.tx_fee,
t.origin_function_signature,
FALSE AS is_pending,
GREATEST(
t._inserted_timestamp,
b._inserted_timestamp
) AS _inserted_timestamp
FROM {{ this }} t
INNER JOIN {{ ref('silver_goerli__blocks') }} b
ON t.block_number = b.block_number
WHERE
t.is_pending
)
{% endif %}
SELECT
block_number,
block_timestamp,
tx_hash,
nonce,
POSITION,
from_address,
to_address,
eth_value,
block_hash,
gas_price,
gas_limit,
input_data,
tx_type,
is_system_tx,
tx_json,
tx_status,
gas_used,
cumulative_gas_used,
effective_gas_price,
l1_fee_scalar,
l1_gas_used,
l1_gas_price,
tx_fee,
origin_function_signature,
is_pending,
_inserted_timestamp
FROM new_records
{% if is_incremental() %}
UNION
SELECT
*
FROM missing_data
{% endif %}

View File

@@ -1,124 +0,0 @@
version: 2
models:
- name: silver_goerli__transactions
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_HASH
- fsc_utils.sequence_gaps:
config:
severity: warn
warn_if: ">10"
partition_by:
- BLOCK_NUMBER
column_name: POSITION
where: BLOCK_TIMESTAMP < CURRENT_DATE
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: NONCE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: POSITION
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
where: TO_ADDRESS IS NOT NULL
- name: ETH_VALUE
tests:
- not_null
- name: BLOCK_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: GAS_PRICE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: GAS_LIMIT
tests:
- not_null
- name: INPUT_DATA
tests:
- not_null
- name: TX_STATUS
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_set:
value_set: ['SUCCESS', 'FAIL']
- name: GAS_USED
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: CUMULATIVE_GAS_USED
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TX_FEE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TX_JSON
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- OBJECT
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null

View File

@@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_blocks') }}
-- depends_on: {{ ref('bronze__blocks') }}
{{ config(
materialized = 'incremental',
unique_key = "block_number",
@@ -61,7 +61,7 @@ SELECT
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_blocks') }}
{{ ref('bronze__blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
@@ -70,7 +70,7 @@ WHERE
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_blocks') }}
{{ ref('bronze__blocks_fr') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number

View File

@@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_confirm_blocks') }}
-- depends_on: {{ ref('bronze__confirm_blocks') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
@@ -18,7 +18,7 @@ WITH base AS (
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_confirm_blocks') }}
{{ ref('bronze__confirm_blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
@@ -32,7 +32,7 @@ WHERE
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_confirm_blocks') }}
{{ ref('bronze__confirm_blocks_fr') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number

View File

@@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_receipts') }}
-- depends_on: {{ ref('bronze__receipts') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
@@ -18,7 +18,7 @@ WITH base AS (
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_receipts') }}
{{ ref('bronze__receipts') }}
WHERE
_inserted_timestamp >= (
SELECT
@@ -28,7 +28,7 @@ WHERE
)
AND IS_OBJECT(DATA)
{% else %}
{{ ref('bronze__streamline_fr_receipts') }}
{{ ref('bronze__receipts_fr') }}
WHERE
IS_OBJECT(DATA)
{% endif %}

View File

@@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_traces') }}
-- depends_on: {{ ref('bronze__traces') }}
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
@@ -19,7 +19,7 @@ WITH bronze_traces AS (
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_traces') }}
{{ ref('bronze__traces') }}
WHERE
_inserted_timestamp >= (
SELECT
@@ -29,7 +29,7 @@ WHERE
)
AND DATA :result IS NOT NULL
{% else %}
{{ ref('bronze__streamline_fr_traces') }}
{{ ref('bronze__traces_fr') }}
WHERE
_partition_by_block_id <= 2300000
AND DATA :result IS NOT NULL

View File

@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_traces') }}
-- depends_on: {{ ref('bronze__traces') }}
{{ config (
materialized = "incremental",
incremental_strategy = 'delete+insert',
@ -8,7 +8,8 @@
full_refresh = false,
tags = ['core','non_realtime']
) }}
{{ fsc_evm.silver_traces_v1(
{{ silver_traces_v1(
full_reload_start_block = 2300000,
full_reload_blocks = 1000000
full_reload_blocks = 1000000,
use_partition_key = true
) }}
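
Note: the move from fsc_evm.silver_traces_v1 to the local silver_traces_v1 temp macro adds the use_partition_key flag. The reload parameters are not defined in this diff; a hypothetical reading, assuming they window the backfill by partition key:

-- hypothetical illustration only; the real logic lives inside the silver_traces_v1 macro
SELECT *
FROM bronze.traces
WHERE _partition_by_block_id >= 2300000             -- full_reload_start_block
  AND _partition_by_block_id <  2300000 + 1000000   -- one full_reload_blocks window per run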

View File

@ -1,4 +1,4 @@
-- depends_on: {{ ref('bronze__streamline_transactions') }}
-- depends_on: {{ ref('bronze__transactions') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
@ -17,7 +17,7 @@ WITH base AS (
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_transactions') }}
{{ ref('bronze__transactions') }}
WHERE
_inserted_timestamp >= (
SELECT
@ -27,7 +27,7 @@ WHERE
)
AND IS_OBJECT(DATA)
{% else %}
{{ ref('bronze__streamline_fr_transactions') }}
{{ ref('bronze__transactions_fr') }}
WHERE
IS_OBJECT(DATA)
{% endif %}

View File

@ -4,18 +4,19 @@ sources:
- name: bronze_streamline
database: streamline
schema: |
{{ "BASE_DEV" if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else "BASE" }}
{{ var('GLOBAL_PROD_DB_NAME') ~ ('_dev' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else '') }}
tables:
- name: receipts
- name: blocks
- name: transactions
- name: debug_traceBlockByNumber
- name: debug_traceblockbynumber
- name: decoded_logs
- name: confirm_blocks
- name: goerli_blocks
- name: goerli_transactions
- name: goerli_debug_traceTransaction
- name: goerli_eth_getTransactionReceipt
- name: blocks_v2
- name: transactions_v2
- name: receipts_v2
- name: traces_v2
- name: confirm_blocks_v2
- name: crosschain
database: "{{ 'crosschain' if target.database == 'BASE' else 'crosschain_dev' }}"
schema: core
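
Note: the templated schema above replaces the hard-coded BASE / BASE_DEV pair with a single expression driven by GLOBAL_PROD_DB_NAME. With this project's value of 'base', it resolves as follows:

-- rendered schema for the bronze_streamline source (database is always streamline)
--   STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES = False  ->  streamline.base.blocks_v2
--   STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES = True   ->  streamline.base_dev.blocks_v2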

View File

@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'BLOCKS' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) or var('GLOBAL_USES_BLOCKS_TRANSACTIONS_PATH', false) else '' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
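
Note: the bronze views are now fully macro-driven; defaults come from set_default_variables_bronze and the body from streamline_external_table_query. The macro's output is not shown in this diff, but judging by the columns the downstream union views select, it presumably renders a thin projection over the external table, on the order of:

-- sketch under that assumption; not the macro's actual output
SELECT
    partition_key,
    block_number,
    VALUE,
    DATA,
    metadata,
    file_name,
    _inserted_timestamp
FROM {{ source('bronze_streamline', 'blocks_v2') }}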

View File

@ -0,0 +1,41 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
SELECT
partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__blocks_fr_v2') }}
{% if var('GLOBAL_USES_STREAMLINE_V1', false) %}
UNION ALL
SELECT
_partition_by_block_id AS partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__blocks_fr_v1') }}
{% endif %}
{% if var('GLOBAL_USES_BLOCKS_TRANSACTIONS_PATH', false) %}
UNION ALL
SELECT
partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__blocks_fr_v2_1') }}
{% endif %}

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'BLOCKS' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %}
{% set partition_join_key = '_partition_by_block_id' %}
{% set balances = default_vars['balances'] %}
{% set block_number = false %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core_streamline_v1']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
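
Note: the _fr_v1 views override partition_function to recover the legacy partition id from the staged file path. Worked through for one file, assuming a v1 path layout like 'mainnet/blocks/12345678_0.json':

SELECT CAST(SPLIT_PART(SPLIT_PART('mainnet/blocks/12345678_0.json', '/', 3), '_', 1) AS INTEGER);
-- SPLIT_PART(file_name, '/', 3)  -> '12345678_0.json'
-- SPLIT_PART(.., '_', 1)         -> '12345678'
-- CAST(.. AS INTEGER)            -> 12345678, joined as _partition_by_block_id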

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'BLOCKS' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'CONFIRM_BLOCKS' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,28 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
SELECT
partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__confirm_blocks_fr_v2') }}
{% if var('GLOBAL_USES_STREAMLINE_V1', false) %}
UNION ALL
SELECT
_partition_by_block_id AS partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__confirm_blocks_fr_v1') }}
{% endif %}

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'CONFIRM_BLOCKS' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %}
{% set partition_join_key = '_partition_by_block_id' %}
{% set balances = default_vars['balances'] %}
{% set block_number = false %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core_streamline_v1']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'CONFIRM_BLOCKS' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'RECEIPTS' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_receipts']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,30 @@
{{ config (
materialized = 'view',
tags = ['bronze_receipts']
) }}
SELECT
partition_key,
block_number,
array_index,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__receipts_fr_v2') }}
{% if var('GLOBAL_USES_STREAMLINE_V1', false) %}
UNION ALL
SELECT
_partition_by_block_id AS partition_key,
block_number,
VALUE :"array_index" :: INT AS array_index,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__receipts_fr_v1') }}
{% endif %}

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'RECEIPTS' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %}
{% set partition_join_key = '_partition_by_block_id' %}
{% set balances = default_vars['balances'] %}
{% set block_number = false %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core_streamline_v1','bronze_receipts']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'RECEIPTS' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_receipts']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -1,11 +0,0 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -1,9 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_query(
model = "confirm_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -1,11 +0,0 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -1,10 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_query(
model = "debug_traceBlockByNumber",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -1,11 +0,0 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'TRACES' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,30 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
SELECT
partition_key,
block_number,
array_index,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__traces_fr_v2') }}
{% if var('GLOBAL_USES_STREAMLINE_V1', false) %}
UNION ALL
SELECT
_partition_by_block_id AS partition_key,
block_number,
VALUE :"array_index" :: INT AS array_index,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__traces_fr_v1') }}
{% endif %}

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'DEBUG_TRACEBLOCKBYNUMBER' if var('GLOBAL_USES_SINGLE_FLIGHT_METHOD',false) else 'TRACES' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %}
{% set partition_join_key = '_partition_by_block_id' %}
{% set balances = default_vars['balances'] %}
{% set block_number = false %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core_streamline_v1']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'TRACES' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,39 @@
{# Set variables #}
{% set source_name = 'TRANSACTIONS' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) or var('GLOBAL_USES_BLOCKS_TRANSACTIONS_PATH', false) else '' %}
{% set model_type = '' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,41 @@
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
SELECT
partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__transactions_fr_v2') }}
{% if var('GLOBAL_USES_STREAMLINE_V1', false) %}
UNION ALL
SELECT
_partition_by_block_id AS partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__transactions_fr_v1') }}
{% endif %}
{% if var('GLOBAL_USES_BLOCKS_TRANSACTIONS_PATH', false) %}
UNION ALL
SELECT
partition_key,
block_number,
VALUE,
DATA,
metadata,
file_name,
_inserted_timestamp
FROM
{{ ref('bronze__transactions_fr_v2_1') }}
{% endif %}

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'TRANSACTIONS' %}
{% set source_version = '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %}
{% set partition_join_key = '_partition_by_block_id' %}
{% set balances = default_vars['balances'] %}
{% set block_number = false %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core_streamline_v1']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -0,0 +1,40 @@
{# Set variables #}
{% set source_name = 'TRANSACTIONS' %}
{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %}
{% set model_type = 'FR' %}
{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%}
{% set partition_function = default_vars['partition_function'] %}
{% set partition_join_key = default_vars['partition_join_key'] %}
{% set balances = default_vars['balances'] %}
{% set block_number = default_vars['block_number'] %}
{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %}
{# Log configuration details #}
{{ log_bronze_details(
source_name = source_name,
source_version = source_version,
model_type = model_type,
partition_function = partition_function,
partition_join_key = partition_join_key,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}
{# Set up dbt configuration #}
{{ config (
materialized = 'view',
tags = ['bronze_core']
) }}
{# Main query starts here #}
{{ streamline_external_table_query_fr(
source_name = source_name.lower(),
source_version = source_version.lower(),
partition_function = partition_function,
partition_join_key = partition_join_key,
balances = balances,
block_number = block_number,
uses_receipts_by_hash = uses_receipts_by_hash
) }}

View File

@ -1,11 +0,0 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_fr_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -1,9 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_fr_query(
model = "confirm_blocks",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -1,11 +0,0 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_fr_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -1,10 +0,0 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_fr_query(
model = "debug_traceBlockByNumber",
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -1,11 +0,0 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_fr_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'BLOCKS' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1
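
Note: the rebuilt complete models key on block_number and add standard audit columns; the old id column is replaced by a dbt_utils surrogate key. For reference, generate_surrogate_key(['block_number']) compiles to roughly:

-- approximate compiled form on Snowflake (current dbt_utils behavior)
MD5(CAST(COALESCE(CAST(block_number AS VARCHAR), '_dbt_utils_surrogate_key_null_') AS VARCHAR)) AS complete_blocks_id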

View File

@ -1,29 +0,0 @@
-- depends_on: {{ ref('bronze__streamline_confirm_blocks') }}
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
tags = ['streamline_core_complete']
) }}
SELECT
id,
block_number,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_confirm_blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01' :: TIMESTAMP) _inserted_timestamp
FROM
{{ this }})
{% else %}
{{ ref('bronze__streamline_fr_confirm_blocks') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,33 +0,0 @@
-- depends_on: {{ ref('bronze__streamline_traces') }}
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["id"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)",
tags = ['streamline_core_complete']
) }}
SELECT
id,
block_number,
file_name,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_traces') }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_traces') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,33 +0,0 @@
-- depends_on: {{ ref('bronze__streamline_blocks') }}
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["id"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)",
tags = ['streamline_core_complete']
) }}
SELECT
id,
block_number,
file_name,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_fr_blocks') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'CONFIRM_BLOCKS' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete_confirm_blocks']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'RECEIPTS' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete_receipts']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'TRACES' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,50 @@
{# Set variables #}
{%- set source_name = 'TRANSACTIONS' -%}
{%- set model_type = 'COMPLETE' -%}
{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%}
{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %}
{# Log configuration details #}
{{ log_complete_details(
post_hook = post_hook,
full_refresh_type = full_refresh_type
) }}
{# Set up dbt configuration #}
-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }}
{{ config (
materialized = "incremental",
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = post_hook,
full_refresh = full_refresh_type,
tags = ['streamline_core_complete']
) }}
{# Main query starts here #}
SELECT
block_number,
file_name,
{{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
_inserted_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{% if is_incremental() %}
{{ ref('bronze__' ~ source_name.lower()) }}
WHERE
_inserted_timestamp >= (
SELECT
COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__' ~ source_name.lower() ~ '_fr') }}
{% endif %}
QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,112 @@
{# Set variables #}
{%- set model_name = 'BLOCKS_TRANSACTIONS' -%}
{%- set model_type = 'HISTORY' -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_history']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__blocks_complete") }} b
INNER JOIN {{ ref("streamline__transactions_complete") }} t USING(block_number)
WHERE 1=1
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
),
ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
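
Note: to_do is a set-difference gap check: every expected block from streamline__blocks, minus blocks already complete for both blocks and transactions. EXCEPT semantics in miniature:

-- EXCEPT returns rows of the first set absent from the second, de-duplicated
SELECT block_number FROM (VALUES (100), (101), (102)) AS expected(block_number)
EXCEPT
SELECT block_number FROM (VALUES (100), (102)) AS completed(block_number);
-- returns 101, the only block still to be requested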

View File

@ -0,0 +1,133 @@
{# Set variables #}
{%- set model_name = 'CONFIRM_BLOCKS' -%}
{%- set model_type = 'HISTORY' -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_history_confirm_blocks']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Delay blocks: trail the chain head before confirming #}
look_back AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_hour") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 6
),
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
AND block_number <= (SELECT block_number FROM look_back)
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
EXCEPT
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
AND block_number IS NOT NULL
AND block_number <= (SELECT block_number FROM look_back)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
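
Note: look_back implements the block delay from this PR: ROW_NUMBER() = 6 over the hourly max-block series selects the sixth-most-recent hourly high-water mark, so confirmations trail the chain head by roughly six hours of blocks. In miniature:

-- picking the Nth-most-recent hourly max as a delayed ceiling (N = 6 here)
SELECT block_number
FROM (VALUES (900), (800), (700), (600), (500), (400), (300)) AS hourly(block_number)
QUALIFY ROW_NUMBER() OVER (ORDER BY block_number DESC) = 6;
-- returns 400, so only blocks <= 400 are eligible for confirmation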

View File

@ -1,72 +0,0 @@
{{ config (
materialized = "view",
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_get_traces(object_construct('sql_source', '{{this.identifier}}','exploded_key','[\"result\"]', 'method', 'debug_traceBlockByNumber', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','200000')}}, 'producer_batch_size', {{var('producer_batch_size','200000')}}, 'worker_batch_size', {{var('worker_batch_size','100000')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))",
target = "{{this.schema}}.{{this.identifier}}"
),
tags = ['streamline_core_history']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
),
blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_debug_traceBlockByNumber") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
)
SELECT
PARSE_JSON(
CONCAT(
'{"jsonrpc": "2.0",',
'"method": "debug_traceBlockByNumber", "params":["',
REPLACE(
concat_ws(
'',
'0x',
to_char(
block_number :: INTEGER,
'XXXXXXXX'
)
),
' ',
''
),
'",{"tracer": "callTracer","timeout": "30s"}',
'],"id":"',
block_number :: INTEGER,
'"}'
)
) AS request
FROM
blocks
ORDER BY
block_number ASC
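
Note: these deleted history models built the JSON-RPC body by hand, including the decimal-to-hex block tag. Worked through for one block, which also shows why the REPLACE is needed: TO_CHAR with an 'X' format mask left-pads the result with spaces.

SELECT REPLACE(CONCAT_WS('', '0x', TO_CHAR(12345678, 'XXXXXXXX')), ' ', '');
-- TO_CHAR(12345678, 'XXXXXXXX')  -> '   BC614E' (space-padded)
-- after CONCAT_WS and REPLACE    -> '0xBC614E', the block tag the RPC expects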

View File

@ -1,71 +0,0 @@
{{ config (
materialized = "view",
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','200000')}}, 'producer_batch_size', {{var('producer_batch_size','200000')}}, 'worker_batch_size', {{var('worker_batch_size','100000')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))",
target = "{{this.schema}}.{{this.identifier}}"
),
tags = ['streamline_core_history']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
),
blocks AS (
SELECT
block_number :: STRING AS block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
EXCEPT
SELECT
block_number :: STRING
FROM
{{ ref("streamline__complete_qn_getBlockWithReceipts") }}
WHERE
block_number <= (
SELECT
block_number
FROM
last_3_days
)
)
SELECT
PARSE_JSON(
CONCAT(
'{"jsonrpc": "2.0",',
'"method": "qn_getBlockWithReceipts", "params":["',
REPLACE(
concat_ws(
'',
'0x',
to_char(
block_number :: INTEGER,
'XXXXXXXX'
)
),
' ',
''
),
'"],"id":"',
block_number :: INTEGER,
'"}'
)
) AS request
FROM
blocks
ORDER BY
block_number ASC

View File

@ -0,0 +1,113 @@
{# Set variables #}
{%- set model_name = 'RECEIPTS' -%}
{%- set model_type = 'HISTORY' -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_history_receipts']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
EXCEPT
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
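
Note: the rewritten generators delegate the HTTP call to live.udf_api and build the body with OBJECT_CONSTRUCT instead of string concatenation. For a single block the body is plain JSON-RPC; method and params are injected from the vars-driven streamline_params, so the values below are placeholders:

SELECT OBJECT_CONSTRUCT(
    'id', 12345678,
    'jsonrpc', '2.0',
    'method', 'eth_getBlockReceipts',      -- placeholder; the real method comes from streamline_params
    'params', ARRAY_CONSTRUCT('0xBC614E')  -- placeholder; rendered from method_params
);
-- -> {"id": 12345678, "jsonrpc": "2.0", "method": "eth_getBlockReceipts", "params": ["0xBC614E"]}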

View File

@ -0,0 +1,113 @@
{# Set variables #}
{%- set model_name = 'TRACES' -%}
{%- set model_type = 'HISTORY' -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_history']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
EXCEPT
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
{% if not new_build %}
AND block_number <= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}

View File

@ -0,0 +1,126 @@
{# Set variables #}
{%- set model_name = 'BLOCKS_TRANSACTIONS' -%}
{%- set model_type = 'REALTIME' -%}
{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method,
min_block=min_block
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_realtime']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
{% if min_block is not none %}
AND block_number >= {{ min_block }}
{% endif %}
EXCEPT
SELECT block_number
FROM {{ ref("streamline__blocks_complete") }} b
INNER JOIN {{ ref("streamline__transactions_complete") }} t USING(block_number)
WHERE 1=1
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
),
ready_blocks AS (
SELECT block_number
FROM to_do
{% if not new_build %}
UNION
SELECT block_number
FROM {{ ref("_unconfirmed_blocks") }}
UNION
SELECT block_number
FROM {{ ref("_missing_txs") }}
{% endif %}
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
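
Note: ready_blocks widens the realtime set with two repair feeds, _unconfirmed_blocks and _missing_txs, via UNION rather than UNION ALL, so a block surfaced by several feeds is requested only once:

-- UNION de-duplicates across the gap list and the repair feeds
SELECT block_number FROM (VALUES (101), (102)) AS gaps(block_number)
UNION
SELECT block_number FROM (VALUES (102), (103)) AS repairs(block_number);
-- returns 101, 102, 103 (102 appears once)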

View File

@ -1,100 +1,138 @@
{{ config (
materialized = "view",
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'confirm_blocks', 'sql_limit', {{var('sql_limit','2000')}}, 'producer_batch_size', {{var('producer_batch_size','2000')}}, 'worker_batch_size', {{var('worker_batch_size','1000')}}, 'batch_call_limit', {{var('batch_call_limit','3')}}))",
target = "{{this.schema}}.{{this.identifier}}"
),
tags = ['streamline_core_realtime']
{# Set variables #}
{%- set model_name = 'CONFIRM_BLOCKS' -%}
{%- set model_type = 'REALTIME' -%}
{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method,
min_block=min_block
) }}
WITH last_3_days AS (
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_realtime_confirm_blocks']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Delay blocks: trail the chain head before confirming #}
look_back AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
{{ ref("_max_block_by_hour") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
),
tbl AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND block_number >= (
SELECT
block_number
FROM
last_3_days
)
) = 6
),
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
AND block_number <= (SELECT block_number FROM look_back)
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
{% if min_block is not none %}
AND block_number >= {{ min_block }}
{% endif %}
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_confirmed_blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
AND block_number IS NOT NULL
AND block_number <= (SELECT block_number FROM look_back)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
AND block_number >= (
SELECT
block_number
FROM
last_3_days
)
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
PARSE_JSON(
CONCAT(
'{"jsonrpc": "2.0",',
'"method": "eth_getBlockByNumber", "params":["',
REPLACE(
concat_ws(
'',
'0x',
to_char(
block_number :: INTEGER,
'XXXXXXXX'
)
),
' ',
''
),
'", false],"id":"',
block_number :: INTEGER,
'"}'
)
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
tbl
ORDER BY
block_number ASC
LIMIT
20000
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
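The legacy post_hook above passed its batching knobs inline to udf_bulk_json_rpc; under Streamline 2.0 the same knobs are assembled by set_streamline_parameters and handed to streamline.udf_bulk_rest_api_v2. A minimal sketch of the params object, assuming the macro mirrors the legacy object_construct keys (the key names here are assumptions, not confirmed):

{# Hypothetical shape of streamline_params for CONFIRM_BLOCKS / REALTIME: #}
{%- set streamline_params = {
    "external_table": "confirm_blocks",
    "sql_limit": 2000,
    "producer_batch_size": 2000,
    "worker_batch_size": 1000,
    "sql_source": this.identifier
} -%}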
@ -1,100 +0,0 @@
{{ config (
materialized = "view",
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_get_traces(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','4000')}}, 'producer_batch_size', {{var('producer_batch_size','4000')}}, 'worker_batch_size', {{var('worker_batch_size','1000')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))",
target = "{{this.schema}}.{{this.identifier}}"
),
tags = ['streamline_core_realtime']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_debug_traceBlockByNumber") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
),
all_blocks AS (
SELECT
block_number
FROM
blocks
UNION
SELECT
block_number
FROM
(
SELECT
block_number
FROM
{{ ref("_missing_traces") }}
UNION
SELECT
block_number
FROM
{{ ref("_unconfirmed_blocks") }}
)
)
SELECT
PARSE_JSON(
CONCAT(
'{"jsonrpc": "2.0",',
'"method": "debug_traceBlockByNumber", "params":["',
REPLACE(
concat_ws(
'',
'0x',
to_char(
block_number :: INTEGER,
'XXXXXXXX'
)
),
' ',
''
),
'",{"tracer": "callTracer", "timeout": "30s"}',
'],"id":"',
block_number :: INTEGER,
'"}'
)
) AS request
FROM
all_blocks
ORDER BY
block_number ASC
LIMIT
10000
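As an aside, the REPLACE/concat_ws gymnastics in this legacy model exist because Snowflake's TO_CHAR with an 'XXXXXXXX' mask left-pads the hex digits with spaces to the mask width, so the padding has to be stripped before the '0x' prefix is usable:

-- TO_CHAR pads to the format width with leading spaces (illustrative values):
SELECT
    TO_CHAR(12345678 :: INTEGER, 'XXXXXXXX') AS padded_hex, -- e.g. '  BC614E'
    REPLACE(concat_ws('', '0x', TO_CHAR(12345678 :: INTEGER, 'XXXXXXXX')), ' ', '') AS clean_hex; -- '0xBC614E'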
@ -1,104 +0,0 @@
{{ config (
materialized = "view",
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','2000')}}, 'producer_batch_size', {{var('producer_batch_size','2000')}}, 'worker_batch_size', {{var('worker_batch_size','1000')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))",
target = "{{this.schema}}.{{this.identifier}}"
),
tags = ['streamline_core_realtime']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_block_lookback") }}
),
blocks AS (
SELECT
block_number :: STRING AS block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
EXCEPT
SELECT
block_number :: STRING
FROM
{{ ref("streamline__complete_qn_getBlockWithReceipts") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
AND _inserted_timestamp >= DATEADD(
'day',
-4,
SYSDATE()
)
),
all_blocks AS (
SELECT
block_number
FROM
blocks
UNION
SELECT
block_number
FROM
(
SELECT
block_number
FROM
{{ ref("_missing_receipts") }}
UNION
SELECT
block_number
FROM
{{ ref("_missing_txs") }}
UNION
SELECT
block_number
FROM
{{ ref("_unconfirmed_blocks") }}
)
)
SELECT
PARSE_JSON(
CONCAT(
'{"jsonrpc": "2.0",',
'"method": "qn_getBlockWithReceipts", "params":["',
REPLACE(
concat_ws(
'',
'0x',
to_char(
block_number :: INTEGER,
'XXXXXXXX'
)
),
' ',
''
),
'"],"id":"',
block_number :: INTEGER,
'"}'
)
) AS request
FROM
all_blocks
ORDER BY
block_number ASC
LIMIT
10000
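Both set operators used throughout these models deduplicate: EXCEPT drops blocks already loaded, and UNION (without ALL) folds in the repair lists without double-counting. A toy illustration of that arithmetic (EXCEPT and UNION share precedence and evaluate left to right in Snowflake):

-- Chain has 100-103, 100-101 are loaded, 102 also appears in a repair list:
WITH chain AS (SELECT column1 AS block_number FROM VALUES (100), (101), (102), (103)),
loaded AS (SELECT column1 AS block_number FROM VALUES (100), (101)),
repairs AS (SELECT column1 AS block_number FROM VALUES (102))
SELECT block_number FROM chain
EXCEPT
SELECT block_number FROM loaded -- leaves 102, 103
UNION
SELECT block_number FROM repairs; -- still 102, 103, no duplicate row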
@ -0,0 +1,130 @@
{# Set variables #}
{%- set model_name = 'RECEIPTS' -%}
{%- set model_type = 'REALTIME' -%}
{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method,
min_block=min_block
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_realtime_receipts']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
{% if min_block is not none %}
AND block_number >= {{ min_block }}
{% endif %}
EXCEPT
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if not new_build %}
UNION
SELECT block_number
FROM {{ ref("_unconfirmed_blocks") }}
UNION
SELECT block_number
FROM {{ ref("_missing_txs") }}
UNION
SELECT block_number
FROM {{ ref("_missing_receipts") }}
{% endif %}
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
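The min_block gate above keys off GLOBAL_START_UP_BLOCK, so a fresh deployment can skip deep history without touching the model. A hypothetical dbt_project.yml entry (the value is illustrative):

# With this var set, the {% if min_block is not none %} branch compiles to
# AND block_number >= 22000000 in the to_do CTE above.
vars:
  GLOBAL_START_UP_BLOCK: 22000000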
@ -0,0 +1,127 @@
{# Set variables #}
{%- set model_name = 'TRACES' -%}
{%- set model_type = 'REALTIME' -%}
{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%}
{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%}
{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #}
{%- set streamline_params = set_streamline_parameters(
model_name=model_name,
model_type=model_type
) -%}
{%- set node_url = default_vars['node_url'] -%}
{%- set node_secret_path = default_vars['node_secret_path'] -%}
{%- set model_quantum_state = default_vars['model_quantum_state'] -%}
{%- set sql_limit = streamline_params['sql_limit'] -%}
{%- set testing_limit = default_vars['testing_limit'] -%}
{%- set order_by_clause = default_vars['order_by_clause'] -%}
{%- set new_build = default_vars['new_build'] -%}
{%- set method_params = streamline_params['method_params'] -%}
{%- set method = streamline_params['method'] -%}
{# Log configuration details #}
{{ log_streamline_details(
model_name=model_name,
model_type=model_type,
node_url=node_url,
model_quantum_state=model_quantum_state,
sql_limit=sql_limit,
testing_limit=testing_limit,
order_by_clause=order_by_clause,
new_build=new_build,
streamline_params=streamline_params,
method_params=method_params,
method=method,
min_block=min_block
) }}
{# Set up dbt configuration #}
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params = streamline_params
),
tags = ['streamline_core_realtime']
) }}
{# Main query starts here #}
WITH
{% if not new_build %}
last_3_days AS (
SELECT block_number
FROM {{ ref("_block_lookback") }}
),
{% endif %}
{# Identify blocks that need processing #}
to_do AS (
SELECT block_number
FROM {{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
{% if min_block is not none %}
AND block_number >= {{ min_block }}
{% endif %}
EXCEPT
{# Exclude blocks that have already been processed #}
SELECT block_number
FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }}
WHERE 1=1
{% if not new_build %}
AND block_number >= (SELECT block_number FROM last_3_days)
{% endif %}
)
{# Prepare the final list of blocks to process #}
,ready_blocks AS (
SELECT block_number
FROM to_do
{% if not new_build %}
UNION
SELECT block_number
FROM {{ ref("_unconfirmed_blocks") }}
UNION
SELECT block_number
FROM {{ ref("_missing_traces") }}
{% endif %}
{% if testing_limit is not none %}
LIMIT {{ testing_limit }}
{% endif %}
)
{# Generate API requests for each block #}
SELECT
block_number,
ROUND(block_number, -3) AS partition_key,
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id', block_number,
'jsonrpc', '2.0',
'method', '{{ method }}',
'params', {{ method_params }}
),
'{{ node_secret_path }}'
) AS request
FROM
ready_blocks
{{ order_by_clause }}
LIMIT {{ sql_limit }}
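Assuming the TRACES method_params still carry the callTracer options used by the legacy model above (both method and params come from vars, so this is unconfirmed), a rendered request body would look roughly like:

-- Sketch of the OBJECT_CONSTRUCT output for block 23456789, legacy-style params assumed:
SELECT OBJECT_CONSTRUCT(
    'id', 23456789,
    'jsonrpc', '2.0',
    'method', 'debug_traceBlockByNumber',
    'params', ARRAY_CONSTRUCT('0x165ec15', OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '30s'))
) AS body;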
@ -1,20 +1,36 @@
{{ config (
    materialized = "view",
    tags = ['streamline_core_complete']
) }}
{% if execute %}
    {% set height = run_query('SELECT streamline.udf_get_chainhead()') %}
    {% set block_height = height.columns [0].values() [0] %}
{% else %}
    {% set block_height = 0 %}
{% endif %}
SELECT
    _id AS block_number
FROM
    {{ ref("silver__number_sequence") }}
WHERE
    _id <= {{ block_height }}
ORDER BY
    _id ASC

{%- if flags.WHICH == 'compile' and execute -%}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ '    materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ '    tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{{ config (
    materialized = "view",
    tags = ['streamline_core_complete']
) }}
SELECT
    _id,
    (
        ({{ var('GLOBAL_BLOCKS_PER_HOUR',0) }} / 60) * {{ var('GLOBAL_CHAINHEAD_DELAY',3) }}
    ) :: INT AS block_number_delay, --minute-based block delay
    (_id - block_number_delay) :: INT AS block_number,
    utils.udf_int_to_hex(block_number) AS block_number_hex
FROM
    {{ ref('silver__number_sequence') }}
WHERE
    _id <= (
        SELECT
            COALESCE(
                block_number,
                0
            )
        FROM
            {{ ref("streamline__get_chainhead") }}
    )
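The delay arithmetic converts an hourly block rate into a per-minute buffer held back from the chainhead. Assuming GLOBAL_BLOCKS_PER_HOUR is set to 1800 for Base (~2-second blocks; the actual value lives in dbt_project.yml) and the default 3-minute GLOBAL_CHAINHEAD_DELAY:

-- (blocks per hour / 60) * minutes of delay:
SELECT ((1800 / 60) * 3) :: INT AS block_number_delay; -- 30 blocks/minute * 3 = 90 blocks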
@ -0,0 +1,54 @@
{%- set model_quantum_state = var('CHAINHEAD_QUANTUM_STATE', 'livequery') -%}
{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%}
{%- if flags.WHICH == 'compile' and execute -%}
{{ log("=== Current Variable Settings ===", info=True) }}
{{ log("CHAINHEAD_QUANTUM_STATE: " ~ model_quantum_state, info=True) }}
{{ log("", info=True) }}
{{ log("=== API Details ===", info=True) }}
{{ log("NODE_URL: " ~ node_url, info=True) }}
{{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }}
{{ log("", info=True) }}
{% set config_log = '\n' %}
{% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%}
{% set config_log = config_log ~ '\n{{ config (\n' %}
{% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %}
{% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %}
{% set config_log = config_log ~ ') }}\n' %}
{{ log(config_log, info=True) }}
{{ log("", info=True) }}
{%- endif -%}
{{ config (
materialized = 'table',
tags = ['streamline_core_complete','chainhead']
) }}
SELECT
live.udf_api(
'POST',
'{{ node_url }}',
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', '{{ model_quantum_state }}'
),
OBJECT_CONSTRUCT(
'id',
0,
'jsonrpc',
'2.0',
'method',
'eth_blockNumber',
'params',
[]
),
'{{ var('GLOBAL_NODE_SECRET_PATH') }}'
) AS resp,
utils.udf_hex_to_int(
resp :data :result :: STRING
) AS block_number
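eth_blockNumber returns the height as a hex quantity in resp:data:result, which udf_hex_to_int converts for the BLOCK_NUMBER column; an illustrative value:

SELECT utils.udf_hex_to_int('0x1557f6a') AS block_number; -- 22380394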
@ -0,0 +1,9 @@
version: 2
models:
- name: streamline__get_chainhead
description: "Retrieves the current chainhead (latest block number) from the RPC node."
columns:
- name: BLOCK_NUMBER
tests:
- not_null