From e46327a3e94b32c9da669dde82d3cac1eef1ba5f Mon Sep 17 00:00:00 2001
From: drethereum <71602799+drethereum@users.noreply.github.com>
Date: Wed, 12 Mar 2025 16:34:06 -0600
Subject: [PATCH] AN-5796/sl2-upgrade-blast (#102)

* initial model updates
* bronze and sources
* integration test
* revert integration
---
 .../dbt_run_streamline_chainhead.yml | 6 +-
 .../dbt_run_streamline_history_adhoc.yml | 2 +-
 dbt_project.yml | 62 +++++-
 macros/decoder/decoded_logs_history.sql | 33 ++-
 macros/decoder/run_decoded_logs_history.sql | 2 -
 ...eamline_external_table_queries_decoder.sql | 101 +++++++++
 .../streamline_external_table_queries.sql | 141 ++++++++++++
 .../main_package/logging/bronze.sql | 36 ++++
 .../main_package/logging/complete.sql | 29 +++
 .../main_package/logging/logging.sql | 36 ++++
 .../main_package/logging/requests.sql | 55 +++++
 .../streamline/set_default_variables.sql | 47 ++++
 .../streamline/set_streamline_parameters.sql | 57 +++++
 macros/streamline/models.sql | 186 ----------------
 .../api_udf/bronze_api__contract_abis.sql | 13 +-
 models/gold/core/core__fact_traces.sql | 2 +-
 models/silver/core/silver__blocks.sql | 52 ++---
 .../silver/core/silver__confirmed_blocks.sql | 6 +-
 models/silver/core/silver__decoded_logs.sql | 2 +-
 models/silver/core/silver__receipts.sql | 6 +-
 models/silver/core/silver__traces.sql | 15 +-
 models/silver/core/silver__transactions.sql | 6 +-
 models/sources.yml | 2 +
 .../streamline/bronze/core/bronze__blocks.sql | 39 ++++
 .../bronze/core/bronze__blocks_fr.sql | 26 +++
 .../bronze/core/bronze__blocks_fr_v1.sql | 40 ++++
 .../bronze/core/bronze__blocks_fr_v2.sql | 34 +++
 .../bronze/core/bronze__confirm_blocks.sql | 39 ++++
 .../bronze/core/bronze__confirm_blocks_fr.sql | 15 ++
 .../core/bronze__confirm_blocks_fr_v2.sql | 40 ++++
 .../bronze/core/bronze__receipts.sql | 39 ++++
 .../bronze/core/bronze__receipts_fr.sql | 16 ++
 .../bronze/core/bronze__receipts_fr_v2.sql | 40 ++++
 .../bronze/core/bronze__streamline_blocks.sql | 8 -
 .../bronze__streamline_confirm_blocks.sql | 8 -
 .../core/bronze__streamline_receipts.sql | 8 -
 .../bronze/core/bronze__streamline_traces.sql | 7 -
 .../core/bronze__streamline_transactions.sql | 8 -
 .../streamline/bronze/core/bronze__traces.sql | 39 ++++
 .../bronze/core/bronze__traces_fr.sql | 16 ++
 .../bronze/core/bronze__traces_fr_v2.sql | 40 ++++
 .../bronze/core/bronze__transactions.sql | 39 ++++
 .../bronze/core/bronze__transactions_fr.sql | 26 +++
 .../core/bronze__transactions_fr_v1.sql | 40 ++++
 .../core/bronze__transactions_fr_v2.sql | 34 +++
 .../core/fr/bronze__streamline_fr_blocks.sql | 8 -
 .../bronze__streamline_fr_confirm_blocks.sql | 8 -
 .../fr/bronze__streamline_fr_receipts.sql | 8 -
 .../core/fr/bronze__streamline_fr_traces.sql | 7 -
 .../fr/bronze__streamline_fr_transactions.sql | 8 -
 .../bronze/decoder/bronze__decoded_logs.sql | 58 ++---
 .../decoder/bronze__decoded_logs_fr.sql | 13 ++
 .../decoder/bronze__decoded_logs_fr_v2.sql | 23 ++
 .../decoder/bronze__fr_decoded_logs.sql | 40 ----
 models/streamline/silver/_block_lookback.sql | 2 +-
 .../streamline/silver/_max_block_by_date.sql | 2 +-
 .../complete/streamline__blocks_complete.sql | 50 +++++
 .../complete/streamline__complete_blocks.sql | 42 ----
 .../streamline__complete_confirmed_blocks.sql | 40 ----
 .../streamline__complete_receipts.sql | 42 ----
 .../complete/streamline__complete_traces.sql | 42 ----
 .../streamline__complete_transactions.sql | 42 ----
 .../streamline__confirm_blocks_complete.sql | 50 +++++
 .../streamline__receipts_complete.sql | 50 +++++
.../complete/streamline__traces_complete.sql | 50 +++++ .../streamline__transactions_complete.sql | 50 +++++ .../history/streamline__blocks_history.sql | 74 ------- ...treamline__blocks_transactions_history.sql | 112 ++++++++++ .../streamline__confirm_blocks_history.sql | 133 ++++++++++++ .../history/streamline__receipts_history.sql | 154 ++++++++----- .../history/streamline__traces_history.sql | 147 ++++++++----- .../streamline__transactions_history.sql | 75 ------- .../realtime/streamline__blocks_realtime.sql | 82 ------- ...reamline__blocks_transactions_realtime.sql | 126 +++++++++++ .../streamline__confirm_blocks_realtime.sql | 177 +++++++++------ .../streamline__receipts_realtime.sql | 203 ++++++++++-------- .../realtime/streamline__traces_realtime.sql | 186 +++++++++------- .../streamline__transactions_realtime.sql | 104 --------- .../silver/core/retry/_missing_traces.sql | 32 +-- .../silver/core/streamline__blocks.sql | 32 ++- .../silver/core/streamline__get_chainhead.sql | 44 +++- .../silver/core/streamline__get_chainhead.yml | 9 + .../streamline__decoded_logs_complete.sql | 50 +++++ .../streamline__decoded_logs_realtime.sql | 110 ++++++++++ .../streamline__complete_decode_logs.sql | 32 --- .../streamline__decode_logs_realtime.sql | 84 -------- package-lock.yml | 18 -- packages.yml | 6 +- 88 files changed, 2625 insertions(+), 1498 deletions(-) create mode 100644 macros/fsc_evm_temp/decoder_package/streamline_external_table_queries_decoder.sql create mode 100644 macros/fsc_evm_temp/main_package/bronze/streamline_external_table_queries.sql create mode 100644 macros/fsc_evm_temp/main_package/logging/bronze.sql create mode 100644 macros/fsc_evm_temp/main_package/logging/complete.sql create mode 100644 macros/fsc_evm_temp/main_package/logging/logging.sql create mode 100644 macros/fsc_evm_temp/main_package/logging/requests.sql create mode 100644 macros/fsc_evm_temp/main_package/streamline/set_default_variables.sql create mode 100644 macros/fsc_evm_temp/main_package/streamline/set_streamline_parameters.sql delete mode 100644 macros/streamline/models.sql create mode 100644 models/streamline/bronze/core/bronze__blocks.sql create mode 100644 models/streamline/bronze/core/bronze__blocks_fr.sql create mode 100644 models/streamline/bronze/core/bronze__blocks_fr_v1.sql create mode 100644 models/streamline/bronze/core/bronze__blocks_fr_v2.sql create mode 100644 models/streamline/bronze/core/bronze__confirm_blocks.sql create mode 100644 models/streamline/bronze/core/bronze__confirm_blocks_fr.sql create mode 100644 models/streamline/bronze/core/bronze__confirm_blocks_fr_v2.sql create mode 100644 models/streamline/bronze/core/bronze__receipts.sql create mode 100644 models/streamline/bronze/core/bronze__receipts_fr.sql create mode 100644 models/streamline/bronze/core/bronze__receipts_fr_v2.sql delete mode 100644 models/streamline/bronze/core/bronze__streamline_blocks.sql delete mode 100644 models/streamline/bronze/core/bronze__streamline_confirm_blocks.sql delete mode 100644 models/streamline/bronze/core/bronze__streamline_receipts.sql delete mode 100644 models/streamline/bronze/core/bronze__streamline_traces.sql delete mode 100644 models/streamline/bronze/core/bronze__streamline_transactions.sql create mode 100644 models/streamline/bronze/core/bronze__traces.sql create mode 100644 models/streamline/bronze/core/bronze__traces_fr.sql create mode 100644 models/streamline/bronze/core/bronze__traces_fr_v2.sql create mode 100644 models/streamline/bronze/core/bronze__transactions.sql create mode 100644 
models/streamline/bronze/core/bronze__transactions_fr.sql create mode 100644 models/streamline/bronze/core/bronze__transactions_fr_v1.sql create mode 100644 models/streamline/bronze/core/bronze__transactions_fr_v2.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_blocks.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_confirm_blocks.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_receipts.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_traces.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_transactions.sql create mode 100644 models/streamline/bronze/decoder/bronze__decoded_logs_fr.sql create mode 100644 models/streamline/bronze/decoder/bronze__decoded_logs_fr_v2.sql delete mode 100644 models/streamline/bronze/decoder/bronze__fr_decoded_logs.sql create mode 100644 models/streamline/silver/core/complete/streamline__blocks_complete.sql delete mode 100644 models/streamline/silver/core/complete/streamline__complete_blocks.sql delete mode 100644 models/streamline/silver/core/complete/streamline__complete_confirmed_blocks.sql delete mode 100644 models/streamline/silver/core/complete/streamline__complete_receipts.sql delete mode 100644 models/streamline/silver/core/complete/streamline__complete_traces.sql delete mode 100644 models/streamline/silver/core/complete/streamline__complete_transactions.sql create mode 100644 models/streamline/silver/core/complete/streamline__confirm_blocks_complete.sql create mode 100644 models/streamline/silver/core/complete/streamline__receipts_complete.sql create mode 100644 models/streamline/silver/core/complete/streamline__traces_complete.sql create mode 100644 models/streamline/silver/core/complete/streamline__transactions_complete.sql delete mode 100644 models/streamline/silver/core/history/streamline__blocks_history.sql create mode 100644 models/streamline/silver/core/history/streamline__blocks_transactions_history.sql create mode 100644 models/streamline/silver/core/history/streamline__confirm_blocks_history.sql delete mode 100644 models/streamline/silver/core/history/streamline__transactions_history.sql delete mode 100644 models/streamline/silver/core/realtime/streamline__blocks_realtime.sql create mode 100644 models/streamline/silver/core/realtime/streamline__blocks_transactions_realtime.sql delete mode 100644 models/streamline/silver/core/realtime/streamline__transactions_realtime.sql create mode 100644 models/streamline/silver/core/streamline__get_chainhead.yml create mode 100644 models/streamline/silver/decoded_logs/complete/streamline__decoded_logs_complete.sql create mode 100644 models/streamline/silver/decoded_logs/realtime/streamline__decoded_logs_realtime.sql delete mode 100644 models/streamline/silver/decoder/complete/streamline__complete_decode_logs.sql delete mode 100644 models/streamline/silver/decoder/realtime/streamline__decode_logs_realtime.sql delete mode 100644 package-lock.yml diff --git a/.github/workflows/dbt_run_streamline_chainhead.yml b/.github/workflows/dbt_run_streamline_chainhead.yml index 71cc7cc..c58fd61 100644 --- a/.github/workflows/dbt_run_streamline_chainhead.yml +++ b/.github/workflows/dbt_run_streamline_chainhead.yml @@ -43,4 +43,8 @@ jobs: dbt deps - name: Run DBT Jobs run: | - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "blast_models,tag:streamline_core_complete" "blast_models,tag:streamline_core_realtime" + dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 
"blast_models,tag:streamline_core_complete" "blast_models,tag:streamline_core_realtime" "blast_models,tag:streamline_core_complete_receipts" "blast_models,tag:streamline_core_realtime_receipts" "blast_models,tag:streamline_core_complete_confirm_blocks" "blast_models,tag:streamline_core_realtime_confirm_blocks" + + - name: Run Chainhead Tests + run: | + dbt test -m "blast_models,tag:chainhead" diff --git a/.github/workflows/dbt_run_streamline_history_adhoc.yml b/.github/workflows/dbt_run_streamline_history_adhoc.yml index f197ccf..1b81eed 100644 --- a/.github/workflows/dbt_run_streamline_history_adhoc.yml +++ b/.github/workflows/dbt_run_streamline_history_adhoc.yml @@ -29,7 +29,7 @@ on: description: 'DBT Run Command' required: true options: - - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "blast_models,tag:streamline_core_history" "blast_models,tag:streamline_core_complete" + - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "blast_models,tag:streamline_core_complete" "blast_models,tag:streamline_core_history" "blast_models,tag:streamline_core_complete_receipts" "blast_models,tag:streamline_core_history_receipts" "blast_models,tag:streamline_core_complete_confirm_blocks" "blast_models,tag:streamline_core_history_confirm_blocks" env: DBT_PROFILES_DIR: ./ diff --git a/dbt_project.yml b/dbt_project.yml index 6985697..4e86f3a 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -80,12 +80,70 @@ vars: dev: API_INTEGRATION: AWS_BLAST_API_DEV EXTERNAL_FUNCTION_URI: y9d0tuavh6.execute-api.us-east-1.amazonaws.com/stg/ + ROLES: + - AWS_LAMBDA_BLAST_API + - INTERNAL_DEV prod: API_INTEGRATION: AWS_BLAST_API EXTERNAL_FUNCTION_URI: 42gzudc5si.execute-api.us-east-1.amazonaws.com/prod/ + ROLES: + - AWS_LAMBDA_BLAST_API + - INTERNAL_DEV + - DBT_CLOUD_BLAST #### STREAMLINE 2.0 END #### -### FSC_EVM - GLOBAL_PROD_DB_NAME: 'blast' \ No newline at end of file +#### FSC_EVM BEGIN #### +# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables + + ### GLOBAL VARIABLES BEGIN ### + ## REQUIRED + GLOBAL_PROD_DB_NAME: 'blast' + GLOBAL_NODE_SECRET_PATH: 'Vault/prod/blast/quicknode/mainnet' + GLOBAL_BLOCKS_PER_HOUR: 1800 + + ### GLOBAL VARIABLES END ### + + ### MAIN_PACKAGE VARIABLES BEGIN ### + + ### CORE ### + ## REQUIRED + + ## OPTIONAL + # GOLD_FULL_REFRESH: True + # SILVER_FULL_REFRESH: True + # BRONZE_FULL_REFRESH: True + + # BLOCKS_COMPLETE_FULL_REFRESH: True + # CONFIRM_BLOCKS_COMPLETE_FULL_REFRESH: True + # TRACES_COMPLETE_FULL_REFRESH: True + # RECEIPTS_COMPLETE_FULL_REFRESH: True + # TRANSACTIONS_COMPLETE_FULL_REFRESH: True + + # BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT: 3 + # BLOCKS_TRANSACTIONS_HISTORY_TESTING_LIMIT: 3 + # TRACES_REALTIME_TESTING_LIMIT: 3 + # TRACES_HISTORY_TESTING_LIMIT: 3 + # ARBTRACE_BLOCK_HISTORY_TESTING_LIMIT: 3 + # RECEIPTS_REALTIME_TESTING_LIMIT: 3 + # RECEIPTS_HISTORY_TESTING_LIMIT: 3 + # CONFIRM_BLOCKS_REALTIME_TESTING_LIMIT: 3 + # CONFIRM_BLOCKS_HISTORY_TESTING_LIMIT: 3 + + # ### MAIN_PACKAGE VARIABLES END ### + + # ### DECODER_PACKAGE VARIABLES BEGIN ### + + # ## REQUIRED + + # ## OPTIONAL + + # DECODED_LOGS_COMPLETE_FULL_REFRESH: True + + # DECODED_LOGS_REALTIME_TESTING_LIMIT: 3 + # DECODED_LOGS_HISTORY_SQL_LIMIT: 1 #limit per monthly range + + ### DECODER_PACKAGE VARIABLES END ### + +#### FSC_EVM END #### \ No newline at end of file diff --git a/macros/decoder/decoded_logs_history.sql b/macros/decoder/decoded_logs_history.sql index 72f462b..4fe2282 100644 --- a/macros/decoder/decoded_logs_history.sql +++ 
b/macros/decoder/decoded_logs_history.sql @@ -1,28 +1,26 @@ {% macro decoded_logs_history(backfill_mode=false) %} {%- set params = { - "sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 7500000), + "sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000), "producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000), "worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000) } -%} {% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %} - {% set find_months_query %} SELECT DISTINCT date_trunc('month', block_timestamp)::date as month FROM {{ ref('core__fact_blocks') }} ORDER BY month ASC {% endset %} - {% set results = run_query(find_months_query) %} {% if execute %} {% set months = results.columns[0].values() %} - + {% for month in months %} {% set view_name = 'decoded_logs_history_' ~ month.strftime('%Y_%m') %} - + {% set create_view_query %} create or replace view streamline.{{view_name}} as ( WITH target_blocks AS ( @@ -45,7 +43,7 @@ ), existing_logs_to_exclude AS ( SELECT _log_id - FROM {{ ref('streamline__complete_decode_logs') }} l + FROM {{ ref('streamline__decoded_logs_complete') }} l INNER JOIN target_blocks b using (block_number) ), candidate_logs AS ( @@ -83,11 +81,9 @@ LIMIT {{ params.sql_limit }} ) {% endset %} - {# Create the view #} {% do run_query(create_view_query) %} {{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }} - {% if var("STREAMLINE_INVOKE_STREAMS", false) %} {# Check if rows exist first #} {% set check_rows_query %} @@ -98,21 +94,22 @@ {% set has_rows = results.columns[0].values()[0] %} {% if has_rows %} - {# Invoke streamline since rows exist to decode #} + {# Invoke streamline, if rows exist to decode #} {% set decode_query %} - SELECT streamline.udf_bulk_decode_logs( - object_construct( - 'sql_source', '{{view_name}}', - 'external_table', 'DECODED_LOGS', - 'sql_limit', {{ params.sql_limit }}, - 'producer_batch_size', {{ params.producer_batch_size }}, - 'worker_batch_size', {{ params.worker_batch_size }}) + SELECT + streamline.udf_bulk_decode_logs_v2( + PARSE_JSON( + $${ "external_table": "decoded_logs", + "producer_batch_size": {{ params.producer_batch_size }}, + "sql_limit": {{ params.sql_limit }}, + "sql_source": "{{view_name}}", + "worker_batch_size": {{ params.worker_batch_size }} }$$ + ) ); {% endset %} {% do run_query(decode_query) %} {{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }} - {# Call wait since we actually did some decoding #} {% do run_query("call system$wait(" ~ wait_time ~ ")") %} {{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }} @@ -120,7 +117,7 @@ {{ log("No rows to decode for month " ~ month.strftime('%Y-%m'), info=True) }} {% endif %} {% endif %} - + {% endfor %} {% endif %} diff --git a/macros/decoder/run_decoded_logs_history.sql b/macros/decoder/run_decoded_logs_history.sql index 2a7f078..3954afb 100644 --- a/macros/decoder/run_decoded_logs_history.sql +++ b/macros/decoder/run_decoded_logs_history.sql @@ -8,9 +8,7 @@ where _inserted_timestamp::date = sysdate()::date and dayname(sysdate()) <> 'Sat' {% endset %} - {% set results = run_query(check_for_new_user_abis_query) %} - {% if execute %} {% set new_user_abis = results.columns[0].values()[0] %} diff --git a/macros/fsc_evm_temp/decoder_package/streamline_external_table_queries_decoder.sql b/macros/fsc_evm_temp/decoder_package/streamline_external_table_queries_decoder.sql new file mode 100644 index 0000000..318a3da --- /dev/null +++ 
b/macros/fsc_evm_temp/decoder_package/streamline_external_table_queries_decoder.sql @@ -0,0 +1,101 @@ +{% macro streamline_external_table_query_decoder( + source_name, + source_version + ) %} + + {% if source_version != '' %} + {% set source_version = '_' ~ source_version.lower() %} + {% endif %} + + WITH meta AS ( + SELECT + job_created_time AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number, + TO_DATE( + concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5)) + ) AS _partition_by_created_date + FROM + TABLE( + information_schema.external_table_file_registration_history( + start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), + table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}') + ) A + ) + SELECT + block_number, + id :: STRING AS id, + DATA, + metadata, + b.file_name, + _inserted_timestamp, + s._partition_by_block_number AS _partition_by_block_number, + s._partition_by_created_date AS _partition_by_created_date + FROM + {{ source( + "bronze_streamline", + source_name ~ source_version + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b._partition_by_block_number = s._partition_by_block_number + AND b._partition_by_created_date = s._partition_by_created_date + WHERE + b._partition_by_block_number = s._partition_by_block_number + AND b._partition_by_created_date = s._partition_by_created_date + AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP()) + AND DATA :error IS NULL + AND DATA IS NOT NULL +{% endmacro %} + + +{% macro streamline_external_table_query_decoder_fr( + source_name, + source_version + ) %} + + {% if source_version != '' %} + {% set source_version = '_' ~ source_version.lower() %} + {% endif %} + + WITH meta AS ( + SELECT + registered_on AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number, + TO_DATE( + concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5)) + ) AS _partition_by_created_date + FROM + TABLE( + information_schema.external_table_files( + table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}' + ) + ) A + ) +SELECT + block_number, + id :: STRING AS id, + DATA, + metadata, + b.file_name, + _inserted_timestamp, + s._partition_by_block_number AS _partition_by_block_number, + s._partition_by_created_date AS _partition_by_created_date +FROM + {{ source( + "bronze_streamline", + source_name ~ source_version + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b._partition_by_block_number = s._partition_by_block_number + AND b._partition_by_created_date = s._partition_by_created_date +WHERE + b._partition_by_block_number = s._partition_by_block_number + AND b._partition_by_created_date = s._partition_by_created_date + AND DATA :error IS NULL + AND DATA IS NOT NULL +{% endmacro %} diff --git a/macros/fsc_evm_temp/main_package/bronze/streamline_external_table_queries.sql b/macros/fsc_evm_temp/main_package/bronze/streamline_external_table_queries.sql new file mode 100644 index 0000000..c6f7919 --- /dev/null +++ b/macros/fsc_evm_temp/main_package/bronze/streamline_external_table_queries.sql @@ -0,0 +1,141 @@ +{% macro streamline_external_table_query( + source_name, + source_version, + partition_function, + balances, + block_number, + uses_receipts_by_hash + ) %} + + {% if source_version != '' %} + 
{% set source_version = '_' ~ source_version.lower() %} + {% endif %} + + WITH meta AS ( + SELECT + job_created_time AS _inserted_timestamp, + file_name, + {{ partition_function }} AS partition_key + FROM + TABLE( + information_schema.external_table_file_registration_history( + start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), + table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}') + ) A + ) + SELECT + s.*, + b.file_name, + b._inserted_timestamp + + {% if balances %}, + r.block_timestamp :: TIMESTAMP AS block_timestamp + {% endif %} + + {% if block_number %}, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number + {% endif %} + {% if uses_receipts_by_hash %}, + s.value :"TX_HASH" :: STRING AS tx_hash + {% endif %} + FROM + {{ source( + "bronze_streamline", + source_name ~ source_version + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key + + {% if balances %} + JOIN {{ ref('_block_ranges') }} + r + ON r.block_number = COALESCE( + s.value :"BLOCK_NUMBER" :: INT, + s.value :"block_number" :: INT + ) + {% endif %} + WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL +{% endmacro %} + +{% macro streamline_external_table_query_fr( + source_name, + source_version, + partition_function, + partition_join_key, + balances, + block_number, + uses_receipts_by_hash + ) %} + + {% if source_version != '' %} + {% set source_version = '_' ~ source_version.lower() %} + {% endif %} + + WITH meta AS ( + SELECT + registered_on AS _inserted_timestamp, + file_name, + {{ partition_function }} AS partition_key + FROM + TABLE( + information_schema.external_table_files( + table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}' + ) + ) A + ) +SELECT + s.*, + b.file_name, + b._inserted_timestamp + + {% if balances %}, + r.block_timestamp :: TIMESTAMP AS block_timestamp +{% endif %} + +{% if block_number %}, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.value :"block_number" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +{% endif %} +{% if uses_receipts_by_hash %}, + s.value :"TX_HASH" :: STRING AS tx_hash +{% endif %} +FROM + {{ source( + "bronze_streamline", + source_name ~ source_version + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.{{ partition_join_key }} + + {% if balances %} + JOIN {{ ref('_block_ranges') }} + r + ON r.block_number = COALESCE( + s.value :"BLOCK_NUMBER" :: INT, + s.value :"block_number" :: INT + ) + {% endif %} +WHERE + b.partition_key = s.{{ partition_join_key }} + AND DATA :error IS NULL + AND DATA IS NOT NULL +{% endmacro %} diff --git a/macros/fsc_evm_temp/main_package/logging/bronze.sql b/macros/fsc_evm_temp/main_package/logging/bronze.sql new file mode 100644 index 0000000..b367deb --- /dev/null +++ b/macros/fsc_evm_temp/main_package/logging/bronze.sql @@ -0,0 +1,36 @@ +{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %} + +{% if source_version != '' %} + {% set source_version = '_' ~ source_version.lower() %} +{% endif %} +{% if model_type != '' %} + {% set model_type = '_' ~ model_type %} +{% endif %} + +{%- if flags.WHICH == 'compile' and execute -%} + + {{ log("=== Current 
Variable Settings ===", info=True) }} + {{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }} + {{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }} + {{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }} + {% if uses_receipts_by_hash %} + {{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }} + {% endif %} + + {{ log("", info=True) }} + {{ log("=== Source Details ===", info=True) }} + {{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }} + {{ log("", info=True) }} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%} + {% set config_log = config_log ~ '\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} + +{%- endif -%} + +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/logging/complete.sql b/macros/fsc_evm_temp/main_package/logging/complete.sql new file mode 100644 index 0000000..3637b41 --- /dev/null +++ b/macros/fsc_evm_temp/main_package/logging/complete.sql @@ -0,0 +1,29 @@ +{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %} + +{%- if flags.WHICH == 'compile' and execute -%} + + {% if uses_receipts_by_hash %} + + {{ log("=== Current Variable Settings ===", info=True) }} + {{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }} + + {% endif %} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%} + {% set config_log = config_log ~ '\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %} + {% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %} + {% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %} + {% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %} + {% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %} + {% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} + +{%- endif -%} + +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/logging/logging.sql b/macros/fsc_evm_temp/main_package/logging/logging.sql new file mode 100644 index 0000000..f368685 --- /dev/null +++ b/macros/fsc_evm_temp/main_package/logging/logging.sql @@ -0,0 +1,36 @@ +{% macro log_model_details(vars=false, params=false) %} + +{%- if execute -%} +/* +DBT Model Config: +{{ model.config | tojson(indent=2) }} +*/ + +{% if vars is not false %} + +{% if var('LOG_MODEL_DETAILS', false) %} +{{ log( vars | tojson(indent=2), info=True) }} +{% endif %} +/* +Variables: +{{ vars | tojson(indent=2) }} +*/ +{% endif %} + +{% if params is not false %} + +{% if var('LOG_MODEL_DETAILS', false) %} +{{ log( params | tojson(indent=2), 
info=True) }} +{% endif %} +/* +Parameters: +{{ params | tojson(indent=2) }} +*/ +{% endif %} + +/* +Raw Code: +{{ model.raw_code }} +*/ +{%- endif -%} +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/logging/requests.sql b/macros/fsc_evm_temp/main_package/logging/requests.sql new file mode 100644 index 0000000..85cbbda --- /dev/null +++ b/macros/fsc_evm_temp/main_package/logging/requests.sql @@ -0,0 +1,55 @@ +{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %} + +{%- if flags.WHICH == 'compile' and execute -%} + + {{ log("=== Current Variable Settings ===", info=True) }} + {{ log("START_UP_BLOCK: " ~ min_block, info=True) }} + {{ log("", info=True) }} + + {{ log("=== API Details ===", info=True) }} + + {{ log("NODE_URL: " ~ node_url, info=True) }} + {{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }} + {{ log("", info=True) }} + + {{ log("=== Current Variable Settings ===", info=True) }} + + {{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }} + {{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }} + {{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }} + {{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }} + {{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }} + {{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }} + {{ log("", info=True) }} + + {{ log("=== RPC Details ===", info=True) }} + + {{ log(model_name ~ ": {", info=True) }} + {{ log(" method: '" ~ method ~ "',", info=True) }} + {{ log(" method_params: " ~ method_params, info=True) }} + {{ log("}", info=True) }} + {{ log("", info=True) }} + + {% set params_str = streamline_params | tojson %} + {% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %} + + {# Clean up the method_params formatting #} + {% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%} + {% set config_log = config_log ~ '\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %} + {% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %} + {% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' 
~ this.identifier ~ '",\n' %} + {% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %} + {% set config_log = config_log ~ ' ),\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} + +{%- endif -%} + +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/streamline/set_default_variables.sql b/macros/fsc_evm_temp/main_package/streamline/set_default_variables.sql new file mode 100644 index 0000000..598c3b9 --- /dev/null +++ b/macros/fsc_evm_temp/main_package/streamline/set_default_variables.sql @@ -0,0 +1,47 @@ +{% macro set_default_variables_streamline(model_name, model_type) %} + +{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%} +{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%} +{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%} +{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%} +{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%} +{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime' + else 'ORDER BY partition_key ASC, block_number ASC' -%} +{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%} +{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%} + +{%- set variables = { + 'node_url': node_url, + 'node_secret_path': node_secret_path, + 'model_quantum_state': model_quantum_state, + 'testing_limit': testing_limit, + 'new_build': new_build, + 'order_by_clause': order_by_clause, + 'uses_receipts_by_hash': uses_receipts_by_hash +} -%} + +{{ return(variables) }} + +{% endmacro %} + +{% macro set_default_variables_bronze(source_name, model_type) %} + +{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION', + "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)") +-%} +{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%} +{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%} +{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%} +{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%} + +{%- set variables = { + 'partition_function': partition_function, + 'partition_join_key': partition_join_key, + 'block_number': block_number, + 'balances': balances, + 'uses_receipts_by_hash': uses_receipts_by_hash +} -%} + +{{ return(variables) }} + +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/streamline/set_streamline_parameters.sql b/macros/fsc_evm_temp/main_package/streamline/set_streamline_parameters.sql new file mode 100644 index 0000000..9c7262f --- /dev/null +++ b/macros/fsc_evm_temp/main_package/streamline/set_streamline_parameters.sql @@ -0,0 +1,57 @@ +{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %} + +{%- set rpc_config_details = { + "blocks_transactions": { + "method": 'eth_getBlockByNumber', + "method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)', + "exploded_key": ['result', 'result.transactions'] + }, + "receipts_by_hash": { + "method": 'eth_getTransactionReceipt', + "method_params": 'ARRAY_CONSTRUCT(tx_hash)' + }, + 
"receipts": { + "method": 'eth_getBlockReceipts', + "method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))', + "exploded_key": ['result'], + "lambdas": 2 + + }, + "traces": { + "method": 'debug_traceBlockByNumber', + "method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))", + "exploded_key": ['result'], + "lambdas": 2 + }, + "confirm_blocks": { + "method": 'eth_getBlockByNumber', + "method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)' + } +} -%} + +{%- set rpc_config = rpc_config_details[model_name.lower()] -%} + +{%- set params = { + "external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()), + "sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier), + "producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier), + "worker_batch_size": var( + (model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(), + (2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1)) + ), + "sql_source": (model_name ~ '_' ~ model_type).lower(), + "method": rpc_config['method'], + "method_params": rpc_config['method_params'] +} -%} + +{%- if rpc_config.get('exploded_key') is not none -%} + {%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%} +{%- endif -%} + +{%- if rpc_config.get('lambdas') is not none -%} + {%- do params.update({"lambdas": rpc_config['lambdas']}) -%} +{%- endif -%} + +{{ return(params) }} + +{% endmacro %} \ No newline at end of file diff --git a/macros/streamline/models.sql b/macros/streamline/models.sql deleted file mode 100644 index fc80c0c..0000000 --- a/macros/streamline/models.sql +++ /dev/null @@ -1,186 +0,0 @@ -{% macro streamline_external_table_query( - model, - partition_function, - partition_name, - unique_key - ) %} - WITH meta AS ( - SELECT - last_modified AS _inserted_timestamp, - file_name, - {{ partition_function }} AS {{ partition_name }} - FROM - TABLE( - information_schema.external_table_file_registration_history( - start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), - table_name => '{{ source( "bronze_streamline", model) }}') - ) A - ) - SELECT - {{ unique_key }} AS block_number, - DATA, - _inserted_timestamp, - MD5( - CAST( - COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text - ) - ) AS id, - s.{{ partition_name }}, - s.value AS VALUE - FROM - {{ source( - "bronze_streamline", - model - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b.{{ partition_name }} = s.{{ partition_name }} - WHERE - b.{{ partition_name }} = s.{{ partition_name }} - AND ( - DATA :error :code IS NULL - OR DATA :error :code NOT IN ( - '-32000', - '-32001', - '-32002', - '-32003', - '-32004', - '-32005', - '-32006', - '-32007', - '-32008', - '-32009', - '-32010', - '-32608' - ) - ) -{% endmacro %} - -{% macro streamline_external_table_fr_query( - model, - partition_function, - partition_name, - unique_key - ) %} - WITH meta AS ( - SELECT - registered_on AS _inserted_timestamp, - file_name, - {{ partition_function }} AS {{ partition_name }} - FROM - TABLE( - information_schema.external_table_files( - table_name => '{{ source( "bronze_streamline", model) }}' - ) - ) A - ) -SELECT - {{ unique_key }} AS block_number, - DATA, - _inserted_timestamp, - MD5( - CAST( - COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) 
AS text - ) - ) AS id, - s.{{ partition_name }}, - s.value AS VALUE -FROM - {{ source( - "bronze_streamline", - model - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b.{{ partition_name }} = s.{{ partition_name }} -WHERE - b.{{ partition_name }} = s.{{ partition_name }} - AND ( - DATA :error :code IS NULL - OR DATA :error :code NOT IN ( - '-32000', - '-32001', - '-32002', - '-32003', - '-32004', - '-32005', - '-32006', - '-32007', - '-32008', - '-32009', - '-32010', - '-32608' - ) - ) -{% endmacro %} - -{% macro streamline_external_table_query_v2( - model, - partition_function - ) %} - WITH meta AS ( - SELECT - job_created_time AS _inserted_timestamp, - file_name, - {{ partition_function }} AS partition_key - FROM - TABLE( - information_schema.external_table_file_registration_history( - start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), - table_name => '{{ source( "bronze_streamline", model) }}') - ) A - ) - SELECT - s.*, - b.file_name, - _inserted_timestamp - FROM - {{ source( - "bronze_streamline", - model - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b.partition_key = s.partition_key - WHERE - b.partition_key = s.partition_key - AND DATA :error IS NULL - -{% endmacro %} - -{% macro streamline_external_table_fr_query_v2( - model, - partition_function - ) %} - WITH meta AS ( - SELECT - registered_on AS _inserted_timestamp, - file_name, - {{ partition_function }} AS partition_key - FROM - TABLE( - information_schema.external_table_files( - table_name => '{{ source( "bronze_streamline", model) }}' - ) - ) A - ) -SELECT - s.*, - b.file_name, - _inserted_timestamp -FROM - {{ source( - "bronze_streamline", - model - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b.partition_key = s.partition_key -WHERE - b.partition_key = s.partition_key - AND DATA :error IS NULL -{% endmacro %} \ No newline at end of file diff --git a/models/bronze/api_udf/bronze_api__contract_abis.sql b/models/bronze/api_udf/bronze_api__contract_abis.sql index 0d8fa36..0ed98b3 100644 --- a/models/bronze/api_udf/bronze_api__contract_abis.sql +++ b/models/bronze/api_udf/bronze_api__contract_abis.sql @@ -50,7 +50,18 @@ row_nos AS ( ), batched AS ({% for item in range(501) %} SELECT - rn.contract_address, live.udf_api('GET', CONCAT('https://api.blastscan.io/api?module=contract&action=getabi&address=', rn.contract_address, '&apikey={key}'),{ 'User-Agent': 'FlipsideStreamline' },{}, 'Vault/prod/block_explorers/blast_scan') AS abi_data, SYSDATE() AS _inserted_timestamp + rn.contract_address, + live.udf_api( + 'GET', + CONCAT('https://api.blastscan.io/api?module=contract&action=getabi&address=',rn.contract_address,'&apikey={key}'), + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', 'livequery' + ), + NULL, + 'Vault/prod/block_explorers/blast_scan' + ) AS abi_data, + SYSDATE() AS _inserted_timestamp FROM row_nos rn WHERE diff --git a/models/gold/core/core__fact_traces.sql b/models/gold/core/core__fact_traces.sql index da80672..28daa38 100644 --- a/models/gold/core/core__fact_traces.sql +++ b/models/gold/core/core__fact_traces.sql @@ -2,7 +2,7 @@ materialized = "incremental", incremental_strategy = 'delete+insert', unique_key = "block_number", - incremental_predicates = [fsc_evm.standard_predicate()], + incremental_predicates = [standard_predicate()], cluster_by = "block_timestamp::date", post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION", tags = ['non_realtime','core'], diff --git a/models/silver/core/silver__blocks.sql 
b/models/silver/core/silver__blocks.sql index 068f6c8..1ad8362 100644 --- a/models/silver/core/silver__blocks.sql +++ b/models/silver/core/silver__blocks.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_blocks') }} +-- depends_on: {{ ref('bronze__blocks') }} {{ config( materialized = 'incremental', unique_key = "block_number", @@ -19,48 +19,48 @@ SELECT ) :id :: INT ) AS block_number, utils.udf_hex_to_int( - DATA :result :baseFeePerGas :: STRING + DATA :baseFeePerGas :: STRING ) :: INT AS base_fee_per_gas, utils.udf_hex_to_int( - DATA :result :difficulty :: STRING + DATA :difficulty :: STRING ) :: INT AS difficulty, - DATA :result :extraData :: STRING AS extra_data, + DATA :extraData :: STRING AS extra_data, utils.udf_hex_to_int( - DATA :result :gasLimit :: STRING + DATA :gasLimit :: STRING ) :: INT AS gas_limit, utils.udf_hex_to_int( - DATA :result :gasUsed :: STRING + DATA :gasUsed :: STRING ) :: INT AS gas_used, - DATA :result :hash :: STRING AS HASH, - DATA :result :logsBloom :: STRING AS logs_bloom, - DATA :result :miner :: STRING AS miner, - DATA :result :mixHash :: STRING AS mixHash, + DATA :hash :: STRING AS HASH, + DATA :logsBloom :: STRING AS logs_bloom, + DATA :miner :: STRING AS miner, + DATA :mixHash :: STRING AS mixHash, utils.udf_hex_to_int( - DATA :result :nonce :: STRING + DATA :nonce :: STRING ) :: INT AS nonce, utils.udf_hex_to_int( - DATA :result :number :: STRING + DATA :number :: STRING ) :: INT AS NUMBER, - DATA :result :parentHash :: STRING AS parent_hash, - DATA :result :receiptsRoot :: STRING AS receipts_root, - DATA :result :sha3Uncles :: STRING AS sha3_uncles, + DATA :parentHash :: STRING AS parent_hash, + DATA :receiptsRoot :: STRING AS receipts_root, + DATA :sha3Uncles :: STRING AS sha3_uncles, utils.udf_hex_to_int( - DATA :result :size :: STRING + DATA :size :: STRING ) :: INT AS SIZE, - DATA :result :stateRoot :: STRING AS state_root, + DATA :stateRoot :: STRING AS state_root, utils.udf_hex_to_int( - DATA :result :timestamp :: STRING + DATA :timestamp :: STRING ) :: TIMESTAMP AS block_timestamp, utils.udf_hex_to_int( - DATA :result :totalDifficulty :: STRING + DATA :totalDifficulty :: STRING ) :: INT AS total_difficulty, ARRAY_SIZE( - DATA :result :transactions + DATA :transactions ) AS tx_count, - DATA :result :transactionsRoot :: STRING AS transactions_root, - DATA :result :uncles AS uncles, - DATA :result :withdrawals AS withdrawals, - DATA :result :withdrawalsRoot :: STRING AS withdrawals_root, + DATA :transactionsRoot :: STRING AS transactions_root, + DATA :uncles AS uncles, + DATA :withdrawals AS withdrawals, + DATA :withdrawalsRoot :: STRING AS withdrawals_root, _inserted_timestamp, {{ dbt_utils.generate_surrogate_key( ['block_number'] @@ -71,7 +71,7 @@ SELECT FROM {% if is_incremental() %} -{{ ref('bronze__streamline_blocks') }} +{{ ref('bronze__blocks') }} WHERE _inserted_timestamp >= ( SELECT @@ -80,7 +80,7 @@ WHERE {{ this }} ) {% else %} - {{ ref('bronze__streamline_fr_blocks') }} + {{ ref('bronze__blocks_fr') }} {% endif %} qualify(ROW_NUMBER() over (PARTITION BY block_number diff --git a/models/silver/core/silver__confirmed_blocks.sql b/models/silver/core/silver__confirmed_blocks.sql index 25c141f..3b677f3 100644 --- a/models/silver/core/silver__confirmed_blocks.sql +++ b/models/silver/core/silver__confirmed_blocks.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_confirm_blocks') }} +-- depends_on: {{ ref('bronze__confirm_blocks') }} {{ config( materialized = 'incremental', incremental_strategy = 'delete+insert', @@ 
-23,7 +23,7 @@ WITH base AS ( FROM {% if is_incremental() %} -{{ ref('bronze__streamline_confirm_blocks') }} +{{ ref('bronze__confirm_blocks') }} WHERE _inserted_timestamp >= ( SELECT @@ -37,7 +37,7 @@ WHERE {{ this }} ) {% else %} - {{ ref('bronze__streamline_fr_confirm_blocks') }} + {{ ref('bronze__confirm_blocks_fr') }} {% endif %} qualify(ROW_NUMBER() over (PARTITION BY block_number diff --git a/models/silver/core/silver__decoded_logs.sql b/models/silver/core/silver__decoded_logs.sql index b2d44a8..a552960 100644 --- a/models/silver/core/silver__decoded_logs.sql +++ b/models/silver/core/silver__decoded_logs.sql @@ -42,7 +42,7 @@ WHERE ) AND DATA NOT ILIKE '%Event topic is not present in given ABI%' {% else %} - {{ ref('bronze__fr_decoded_logs') }} + {{ ref('bronze__decoded_logs_fr') }} WHERE DATA NOT ILIKE '%Event topic is not present in given ABI%' {% endif %} diff --git a/models/silver/core/silver__receipts.sql b/models/silver/core/silver__receipts.sql index 756810f..904ccac 100644 --- a/models/silver/core/silver__receipts.sql +++ b/models/silver/core/silver__receipts.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_receipts') }} +-- depends_on: {{ ref('bronze__receipts') }} {{ config( materialized = 'incremental', incremental_strategy = 'delete+insert', @@ -24,7 +24,7 @@ WITH base AS ( FROM {% if is_incremental() %} -{{ ref('bronze__streamline_receipts') }} +{{ ref('bronze__receipts') }} WHERE _inserted_timestamp >= ( SELECT @@ -34,7 +34,7 @@ WHERE ) AND IS_OBJECT(DATA) {% else %} - {{ ref('bronze__streamline_fr_receipts') }} + {{ ref('bronze__receipts_fr') }} WHERE IS_OBJECT(DATA) {% endif %} diff --git a/models/silver/core/silver__traces.sql b/models/silver/core/silver__traces.sql index 9042f5f..f82e350 100644 --- a/models/silver/core/silver__traces.sql +++ b/models/silver/core/silver__traces.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_traces') }} +-- depends_on: {{ ref('bronze__traces') }} {{ config ( materialized = "incremental", incremental_strategy = 'delete+insert', @@ -8,12 +8,7 @@ tags = ['non_realtime'], full_refresh = false ) }} -{# {{ fsc_evm.silver_traces_v1( -full_reload_start_block = 3000000, -full_reload_blocks = 1000000, -use_partition_key = TRUE -) }} -#} + WITH bronze_traces AS ( SELECT @@ -25,7 +20,7 @@ WITH bronze_traces AS ( FROM {% if is_incremental() and not full_reload_mode %} -{{ ref('bronze__streamline_traces') }} +{{ ref('bronze__traces') }} WHERE _inserted_timestamp >= ( SELECT @@ -34,7 +29,7 @@ WHERE {{ this }} ) AND DATA :result IS NOT NULL {% elif is_incremental() and full_reload_mode %} - {{ ref('bronze__streamline_fr_traces') }} + {{ ref('bronze__traces_fr') }} WHERE partition_key BETWEEN ( SELECT @@ -49,7 +44,7 @@ WHERE {{ this }} ) {% else %} - {{ ref('bronze__streamline_fr_traces') }} + {{ ref('bronze__traces_fr') }} WHERE partition_key <= 3000000 {% endif %} diff --git a/models/silver/core/silver__transactions.sql b/models/silver/core/silver__transactions.sql index 24efe99..72e79cc 100644 --- a/models/silver/core/silver__transactions.sql +++ b/models/silver/core/silver__transactions.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_transactions') }} +-- depends_on: {{ ref('bronze__transactions') }} {{ config( materialized = 'incremental', incremental_strategy = 'delete+insert', @@ -24,7 +24,7 @@ WITH base AS ( FROM {% if is_incremental() %} -{{ ref('bronze__streamline_transactions') }} +{{ ref('bronze__transactions') }} WHERE _inserted_timestamp >= ( SELECT @@ -34,7 +34,7 @@ WHERE ) AND 
IS_OBJECT(DATA) {% else %} - {{ ref('bronze__streamline_fr_transactions') }} + {{ ref('bronze__transactions_fr') }} WHERE IS_OBJECT(DATA) {% endif %} diff --git a/models/sources.yml b/models/sources.yml index af2914e..af97ee9 100644 --- a/models/sources.yml +++ b/models/sources.yml @@ -12,6 +12,8 @@ sources: - name: traces - name: decoded_logs - name: confirm_blocks + - name: blocks_v2 + - name: transactions_v2 - name: udfs_streamline database: udfs schema: streamline diff --git a/models/streamline/bronze/core/bronze__blocks.sql b/models/streamline/bronze/core/bronze__blocks.sql new file mode 100644 index 0000000..c3b377f --- /dev/null +++ b/models/streamline/bronze/core/bronze__blocks.sql @@ -0,0 +1,39 @@ +{# Set variables #} +{% set source_name = 'BLOCKS' %} +{% set source_version = 'V2' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__blocks_fr.sql b/models/streamline/bronze/core/bronze__blocks_fr.sql new file mode 100644 index 0000000..781d2b5 --- /dev/null +++ b/models/streamline/bronze/core/bronze__blocks_fr.sql @@ -0,0 +1,26 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +SELECT + partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__blocks_fr_v2') }} +UNION ALL +SELECT + partition_key, + block_number, + VALUE, + DATA :result AS DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__blocks_fr_v1') }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__blocks_fr_v1.sql b/models/streamline/bronze/core/bronze__blocks_fr_v1.sql new file mode 100644 index 0000000..855b42b --- /dev/null +++ b/models/streamline/bronze/core/bronze__blocks_fr_v1.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'BLOCKS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + 
model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__blocks_fr_v2.sql b/models/streamline/bronze/core/bronze__blocks_fr_v2.sql new file mode 100644 index 0000000..236bdd0 --- /dev/null +++ b/models/streamline/bronze/core/bronze__blocks_fr_v2.sql @@ -0,0 +1,34 @@ +{# Set variables #} +{% set source_name = 'BLOCKS' %} +{% set source_version = 'V2'%} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_model_details( + vars = default_vars +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__confirm_blocks.sql b/models/streamline/bronze/core/bronze__confirm_blocks.sql new file mode 100644 index 0000000..dbbd699 --- /dev/null +++ b/models/streamline/bronze/core/bronze__confirm_blocks.sql @@ -0,0 +1,39 @@ +{# Set variables #} +{% set source_name = 'CONFIRM_BLOCKS' %} +{% set source_version = '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git 
a/models/streamline/bronze/core/bronze__confirm_blocks_fr.sql b/models/streamline/bronze/core/bronze__confirm_blocks_fr.sql new file mode 100644 index 0000000..c3aa721 --- /dev/null +++ b/models/streamline/bronze/core/bronze__confirm_blocks_fr.sql @@ -0,0 +1,15 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +SELECT + partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__confirm_blocks_fr_v2') }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__confirm_blocks_fr_v2.sql b/models/streamline/bronze/core/bronze__confirm_blocks_fr_v2.sql new file mode 100644 index 0000000..894d1ad --- /dev/null +++ b/models/streamline/bronze/core/bronze__confirm_blocks_fr_v2.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'CONFIRM_BLOCKS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__receipts.sql b/models/streamline/bronze/core/bronze__receipts.sql new file mode 100644 index 0000000..e1fafed --- /dev/null +++ b/models/streamline/bronze/core/bronze__receipts.sql @@ -0,0 +1,39 @@ +{# Set variables #} +{% set source_name = 'RECEIPTS' %} +{% set source_version = '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_receipts'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + 
uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__receipts_fr.sql b/models/streamline/bronze/core/bronze__receipts_fr.sql new file mode 100644 index 0000000..c62f29f --- /dev/null +++ b/models/streamline/bronze/core/bronze__receipts_fr.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_receipts'] +) }} + +SELECT + partition_key, + block_number, + array_index, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__receipts_fr_v2') }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__receipts_fr_v2.sql b/models/streamline/bronze/core/bronze__receipts_fr_v2.sql new file mode 100644 index 0000000..dccd967 --- /dev/null +++ b/models/streamline/bronze/core/bronze__receipts_fr_v2.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'RECEIPTS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_receipts'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__streamline_blocks.sql b/models/streamline/bronze/core/bronze__streamline_blocks.sql deleted file mode 100644 index 1651e04..0000000 --- a/models/streamline/bronze/core/bronze__streamline_blocks.sql +++ /dev/null @@ -1,8 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{{ streamline_external_table_query_v2( - model = "blocks", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__streamline_confirm_blocks.sql b/models/streamline/bronze/core/bronze__streamline_confirm_blocks.sql deleted file mode 100644 index 63aff87..0000000 --- a/models/streamline/bronze/core/bronze__streamline_confirm_blocks.sql +++ /dev/null @@ -1,8 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{{ streamline_external_table_query_v2( - model = "confirm_blocks", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} diff --git a/models/streamline/bronze/core/bronze__streamline_receipts.sql b/models/streamline/bronze/core/bronze__streamline_receipts.sql deleted file mode 100644 index 58e45de..0000000 --- a/models/streamline/bronze/core/bronze__streamline_receipts.sql +++ /dev/null @@ -1,8 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{{ 
streamline_external_table_query_v2( - model = "receipts", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} diff --git a/models/streamline/bronze/core/bronze__streamline_traces.sql b/models/streamline/bronze/core/bronze__streamline_traces.sql deleted file mode 100644 index 6f3dc29..0000000 --- a/models/streamline/bronze/core/bronze__streamline_traces.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ config ( - materialized = 'view' -) }} -{{ fsc_evm.streamline_external_table_query( - model = "traces", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} diff --git a/models/streamline/bronze/core/bronze__streamline_transactions.sql b/models/streamline/bronze/core/bronze__streamline_transactions.sql deleted file mode 100644 index c318987..0000000 --- a/models/streamline/bronze/core/bronze__streamline_transactions.sql +++ /dev/null @@ -1,8 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{{ streamline_external_table_query_v2( - model = "transactions", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} diff --git a/models/streamline/bronze/core/bronze__traces.sql b/models/streamline/bronze/core/bronze__traces.sql new file mode 100644 index 0000000..217fe56 --- /dev/null +++ b/models/streamline/bronze/core/bronze__traces.sql @@ -0,0 +1,39 @@ +{# Set variables #} +{% set source_name = 'TRACES' %} +{% set source_version = '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__traces_fr.sql b/models/streamline/bronze/core/bronze__traces_fr.sql new file mode 100644 index 0000000..73bf524 --- /dev/null +++ b/models/streamline/bronze/core/bronze__traces_fr.sql @@ -0,0 +1,16 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +SELECT + partition_key, + block_number, + array_index, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__traces_fr_v2') }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__traces_fr_v2.sql b/models/streamline/bronze/core/bronze__traces_fr_v2.sql new file mode 100644 index 0000000..a65d208 --- /dev/null +++ b/models/streamline/bronze/core/bronze__traces_fr_v2.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'TRACES' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set 
default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__transactions.sql b/models/streamline/bronze/core/bronze__transactions.sql new file mode 100644 index 0000000..cac3fbc --- /dev/null +++ b/models/streamline/bronze/core/bronze__transactions.sql @@ -0,0 +1,39 @@ +{# Set variables #} +{% set source_name = 'TRANSACTIONS' %} +{% set source_version = 'V2' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__transactions_fr.sql b/models/streamline/bronze/core/bronze__transactions_fr.sql new file mode 100644 index 0000000..99be7b5 --- /dev/null +++ b/models/streamline/bronze/core/bronze__transactions_fr.sql @@ -0,0 +1,26 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +SELECT + partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__transactions_fr_v2') }} +UNION ALL +SELECT + partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__transactions_fr_v1') }} diff --git a/models/streamline/bronze/core/bronze__transactions_fr_v1.sql b/models/streamline/bronze/core/bronze__transactions_fr_v1.sql new file mode 100644 index 0000000..b599f0f --- 
/dev/null +++ b/models/streamline/bronze/core/bronze__transactions_fr_v1.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'TRANSACTIONS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__transactions_fr_v2.sql b/models/streamline/bronze/core/bronze__transactions_fr_v2.sql new file mode 100644 index 0000000..576a811 --- /dev/null +++ b/models/streamline/bronze/core/bronze__transactions_fr_v2.sql @@ -0,0 +1,34 @@ +{# Set variables #} +{% set source_name = 'TRANSACTIONS' %} +{% set source_version = 'V2'%} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_model_details( + vars = default_vars +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_blocks.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_blocks.sql deleted file mode 100644 index 189c765..0000000 --- a/models/streamline/bronze/core/fr/bronze__streamline_fr_blocks.sql +++ /dev/null @@ -1,8 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{{ streamline_external_table_fr_query_v2( - model = "blocks", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_confirm_blocks.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_confirm_blocks.sql deleted file mode 100644 index 2fdc551..0000000 --- 
a/models/streamline/bronze/core/fr/bronze__streamline_fr_confirm_blocks.sql +++ /dev/null @@ -1,8 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{{ streamline_external_table_fr_query_v2( - model = "confirm_blocks", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_receipts.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_receipts.sql deleted file mode 100644 index 6183083..0000000 --- a/models/streamline/bronze/core/fr/bronze__streamline_fr_receipts.sql +++ /dev/null @@ -1,8 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{{ streamline_external_table_fr_query_v2( - model = "receipts", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_traces.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_traces.sql deleted file mode 100644 index 9d965bf..0000000 --- a/models/streamline/bronze/core/fr/bronze__streamline_fr_traces.sql +++ /dev/null @@ -1,7 +0,0 @@ -{{ config ( - materialized = 'view' -) }} -{{ fsc_evm.streamline_external_table_fr_query( - model = "traces", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_transactions.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_transactions.sql deleted file mode 100644 index 67bfde9..0000000 --- a/models/streamline/bronze/core/fr/bronze__streamline_fr_transactions.sql +++ /dev/null @@ -1,8 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{{ streamline_external_table_fr_query_v2( - model = 'transactions', - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )" -) }} diff --git a/models/streamline/bronze/decoder/bronze__decoded_logs.sql b/models/streamline/bronze/decoder/bronze__decoded_logs.sql index bd43f6f..7f87ae3 100644 --- a/models/streamline/bronze/decoder/bronze__decoded_logs.sql +++ b/models/streamline/bronze/decoder/bronze__decoded_logs.sql @@ -1,41 +1,23 @@ -{{ config ( - materialized = 'view' +{# Set variables #} +{% set source_name = 'DECODED_LOGS' %} +{% set source_version = '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{# Log configuration details #} +{{ log_model_details( + vars = default_vars ) }} -WITH meta AS ( +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_decoded_logs'] +) }} - SELECT - last_modified AS _inserted_timestamp, - file_name, - CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number, - TO_DATE( - concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5)) - ) AS _partition_by_created_date - FROM - TABLE( - information_schema.external_table_file_registration_history( - start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), - table_name => '{{ source( "bronze_streamline", "decoded_logs") }}') - ) A - ) - SELECT - block_number, - id :: STRING AS id, - DATA, - _inserted_timestamp, - s._partition_by_block_number AS _partition_by_block_number, - s._partition_by_created_date AS _partition_by_created_date - FROM - {{ source( - "bronze_streamline", - "decoded_logs" - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b._partition_by_block_number = s._partition_by_block_number - AND 
b._partition_by_created_date = s._partition_by_created_date - WHERE - b._partition_by_block_number = s._partition_by_block_number - AND b._partition_by_created_date = s._partition_by_created_date - AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP()) +{# Main query starts here #} +{{ streamline_external_table_query_decoder( + source_name = source_name.lower(), + source_version = source_version.lower() +) }} \ No newline at end of file diff --git a/models/streamline/bronze/decoder/bronze__decoded_logs_fr.sql b/models/streamline/bronze/decoder/bronze__decoded_logs_fr.sql new file mode 100644 index 0000000..5fa36e2 --- /dev/null +++ b/models/streamline/bronze/decoder/bronze__decoded_logs_fr.sql @@ -0,0 +1,13 @@ +{# Log configuration details #} +{{ log_model_details() }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_decoded_logs'] +) }} + +SELECT + * +FROM + {{ ref('bronze__decoded_logs_fr_v2') }} \ No newline at end of file diff --git a/models/streamline/bronze/decoder/bronze__decoded_logs_fr_v2.sql b/models/streamline/bronze/decoder/bronze__decoded_logs_fr_v2.sql new file mode 100644 index 0000000..1aa6b6e --- /dev/null +++ b/models/streamline/bronze/decoder/bronze__decoded_logs_fr_v2.sql @@ -0,0 +1,23 @@ +{# Set variables #} +{% set source_name = 'DECODED_LOGS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{# Log configuration details #} +{{ log_model_details( + vars = default_vars +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_decoded_logs'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_decoder_fr( + source_name = source_name.lower(), + source_version = source_version.lower() +) }} \ No newline at end of file diff --git a/models/streamline/bronze/decoder/bronze__fr_decoded_logs.sql b/models/streamline/bronze/decoder/bronze__fr_decoded_logs.sql deleted file mode 100644 index 4e4a1c8..0000000 --- a/models/streamline/bronze/decoder/bronze__fr_decoded_logs.sql +++ /dev/null @@ -1,40 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -WITH meta AS ( - - SELECT - registered_on AS _inserted_timestamp, - file_name, - CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number, - TO_DATE( - concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5)) - ) AS _partition_by_created_date - FROM - TABLE( - information_schema.external_table_files( - table_name => '{{ source( "bronze_streamline", "decoded_logs") }}' - ) - ) A -) -SELECT - block_number, - id :: STRING AS id, - DATA, - _inserted_timestamp, - s._partition_by_block_number AS _partition_by_block_number, - s._partition_by_created_date AS _partition_by_created_date -FROM - {{ source( - "bronze_streamline", - "decoded_logs" - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b._partition_by_block_number = s._partition_by_block_number - AND b._partition_by_created_date = s._partition_by_created_date -WHERE - b._partition_by_block_number = s._partition_by_block_number - AND b._partition_by_created_date = s._partition_by_created_date diff --git a/models/streamline/silver/_block_lookback.sql b/models/streamline/silver/_block_lookback.sql index 41f2108..f37bcf6 100644 --- a/models/streamline/silver/_block_lookback.sql +++ b/models/streamline/silver/_block_lookback.sql @@ -3,7 +3,7 @@ ) }} SELECT - 
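Context for `streamline_external_table_query_decoder`: its body lands in macros/fsc_evm_temp/decoder_package/streamline_external_table_queries_decoder.sql and is not part of this hunk. Judging from the inline SQL it replaces directly above, the compiled view presumably keeps the same shape; the sketch below simply re-states that deleted query, with parameterization by source_name/source_version assumed. The _fr variant presumably swaps the three-day registration-history lookback for information_schema.external_table_files, as the deleted bronze__fr_decoded_logs.sql did.

WITH meta AS (
    SELECT
        last_modified AS _inserted_timestamp,
        file_name,
        CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number,
        TO_DATE(
            concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5))
        ) AS _partition_by_created_date
    FROM
        TABLE(
            information_schema.external_table_file_registration_history(
                start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
                table_name => '{{ source("bronze_streamline", "decoded_logs") }}'
            )
        )
)
SELECT
    s.block_number,
    s.id :: STRING AS id,
    s.DATA,
    b._inserted_timestamp,
    s._partition_by_block_number,
    s._partition_by_created_date
FROM
    {{ source("bronze_streamline", "decoded_logs") }} s
    JOIN meta b
    ON b.file_name = metadata$filename
    AND b._partition_by_block_number = s._partition_by_block_number
    AND b._partition_by_created_date = s._partition_by_created_date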
COALESCE(MIN(block_number), 0) AS block_number + MIN(block_number) AS block_number FROM {{ ref("silver__blocks") }} WHERE diff --git a/models/streamline/silver/_max_block_by_date.sql b/models/streamline/silver/_max_block_by_date.sql index 7609b67..a56cf82 100644 --- a/models/streamline/silver/_max_block_by_date.sql +++ b/models/streamline/silver/_max_block_by_date.sql @@ -24,4 +24,4 @@ WHERE MAX(block_date) FROM base - ) + ) \ No newline at end of file diff --git a/models/streamline/silver/core/complete/streamline__blocks_complete.sql b/models/streamline/silver/core/complete/streamline__blocks_complete.sql new file mode 100644 index 0000000..4c8977d --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__blocks_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'BLOCKS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__complete_blocks.sql b/models/streamline/silver/core/complete/streamline__complete_blocks.sql deleted file mode 100644 index c602180..0000000 --- a/models/streamline/silver/core/complete/streamline__complete_blocks.sql +++ /dev/null @@ -1,42 +0,0 @@ --- depends_on: {{ ref('bronze__streamline_blocks') }} -{{ config ( - materialized = "incremental", - unique_key = "block_number", - cluster_by = "ROUND(block_number, -3)", - post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", - tags = ['streamline_core_complete'] -) }} - -SELECT - COALESCE( - VALUE :BLOCK_NUMBER :: INT, - metadata :request :"data" :id :: INT, - PARSE_JSON( - metadata :request :"data" - ) :id :: INT - ) AS block_number, - {{ dbt_utils.generate_surrogate_key( - ['block_number'] - ) }} AS complete_blocks_id, - SYSDATE() AS inserted_timestamp, - SYSDATE() AS modified_timestamp, - _inserted_timestamp, - '{{ invocation_id }}' AS _invocation_id -FROM - -{% if is_incremental() %} -{{ ref('bronze__streamline_blocks') }} -WHERE - _inserted_timestamp >= ( - SELECT - MAX(_inserted_timestamp) _inserted_timestamp - FROM - {{ this }} - ) -{% else %} - {{ ref('bronze__streamline_fr_blocks') }} -{% endif %} - -qualify(ROW_NUMBER() over (PARTITION BY block_number 
-ORDER BY - _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__complete_confirmed_blocks.sql b/models/streamline/silver/core/complete/streamline__complete_confirmed_blocks.sql deleted file mode 100644 index c8a9741..0000000 --- a/models/streamline/silver/core/complete/streamline__complete_confirmed_blocks.sql +++ /dev/null @@ -1,40 +0,0 @@ --- depends_on: {{ ref('bronze__streamline_confirm_blocks') }} -{{ config ( - materialized = "incremental", - unique_key = "block_number", - cluster_by = "ROUND(block_number, -3)", - tags = ['streamline_core_complete'] -) }} - -SELECT - COALESCE( - VALUE :BLOCK_NUMBER :: INT, - metadata :request :"data" :id :: INT, - PARSE_JSON( - metadata :request :"data" - ) :id :: INT - ) AS block_number, - {{ dbt_utils.generate_surrogate_key( - ['block_number'] - ) }} AS complete_confirmed_blocks_id, - SYSDATE() AS inserted_timestamp, - SYSDATE() AS modified_timestamp, - _inserted_timestamp, - '{{ invocation_id }}' AS _invocation_id -FROM - -{% if is_incremental() %} -{{ ref('bronze__streamline_confirm_blocks') }} -WHERE - _inserted_timestamp >= ( - SELECT - COALESCE(MAX(_inserted_timestamp), '1970-01-01' :: TIMESTAMP) _inserted_timestamp - FROM - {{ this }}) - {% else %} - {{ ref('bronze__streamline_fr_confirm_blocks') }} - {% endif %} - - qualify(ROW_NUMBER() over (PARTITION BY block_number - ORDER BY - _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__complete_receipts.sql b/models/streamline/silver/core/complete/streamline__complete_receipts.sql deleted file mode 100644 index f06e107..0000000 --- a/models/streamline/silver/core/complete/streamline__complete_receipts.sql +++ /dev/null @@ -1,42 +0,0 @@ --- depends_on: {{ ref('bronze__streamline_receipts') }} -{{ config ( - materialized = "incremental", - unique_key = "block_number", - cluster_by = "ROUND(block_number, -3)", - post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", - tags = ['streamline_core_complete'] -) }} - -SELECT - COALESCE( - VALUE :BLOCK_NUMBER :: INT, - metadata :request :"data" :id :: INT, - PARSE_JSON( - metadata :request :"data" - ) :id :: INT - ) AS block_number, - {{ dbt_utils.generate_surrogate_key( - ['block_number'] - ) }} AS complete_receipts_id, - SYSDATE() AS inserted_timestamp, - SYSDATE() AS modified_timestamp, - _inserted_timestamp, - '{{ invocation_id }}' AS _invocation_id -FROM - -{% if is_incremental() %} -{{ ref('bronze__streamline_receipts') }} -WHERE - _inserted_timestamp >= ( - SELECT - MAX(_inserted_timestamp) _inserted_timestamp - FROM - {{ this }} - ) -{% else %} - {{ ref('bronze__streamline_fr_receipts') }} -{% endif %} - -qualify(ROW_NUMBER() over (PARTITION BY block_number -ORDER BY - _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__complete_traces.sql b/models/streamline/silver/core/complete/streamline__complete_traces.sql deleted file mode 100644 index b38320c..0000000 --- a/models/streamline/silver/core/complete/streamline__complete_traces.sql +++ /dev/null @@ -1,42 +0,0 @@ --- depends_on: {{ ref('bronze__streamline_traces') }} -{{ config ( - materialized = "incremental", - unique_key = "block_number", - cluster_by = "ROUND(block_number, -3)", - post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", - tags = ['streamline_core_complete'] -) }} - -SELECT - COALESCE( - VALUE :BLOCK_NUMBER :: INT, - metadata :request :"data" :id :: INT, - PARSE_JSON( - metadata :request 
:"data" - ) :id :: INT - ) AS block_number, - {{ dbt_utils.generate_surrogate_key( - ['block_number'] - ) }} AS complete_traces_id, - SYSDATE() AS inserted_timestamp, - SYSDATE() AS modified_timestamp, - _inserted_timestamp, - '{{ invocation_id }}' AS _invocation_id -FROM - -{% if is_incremental() %} -{{ ref('bronze__streamline_traces') }} -WHERE - _inserted_timestamp >= ( - SELECT - MAX(_inserted_timestamp) _inserted_timestamp - FROM - {{ this }} - ) -{% else %} - {{ ref('bronze__streamline_fr_traces') }} -{% endif %} - -qualify(ROW_NUMBER() over (PARTITION BY block_number -ORDER BY - _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__complete_transactions.sql b/models/streamline/silver/core/complete/streamline__complete_transactions.sql deleted file mode 100644 index 9b09e40..0000000 --- a/models/streamline/silver/core/complete/streamline__complete_transactions.sql +++ /dev/null @@ -1,42 +0,0 @@ --- depends_on: {{ ref('bronze__streamline_transactions') }} -{{ config ( - materialized = "incremental", - unique_key = "block_number", - cluster_by = "ROUND(block_number, -3)", - post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)", - tags = ['streamline_core_complete'] -) }} - -SELECT - COALESCE( - VALUE :BLOCK_NUMBER :: INT, - metadata :request :"data" :id :: INT, - PARSE_JSON( - metadata :request :"data" - ) :id :: INT - ) AS block_number, - {{ dbt_utils.generate_surrogate_key( - ['block_number'] - ) }} AS complete_transactions_id, - SYSDATE() AS inserted_timestamp, - SYSDATE() AS modified_timestamp, - _inserted_timestamp, - '{{ invocation_id }}' AS _invocation_id -FROM - -{% if is_incremental() %} -{{ ref('bronze__streamline_transactions') }} -WHERE - _inserted_timestamp >= ( - SELECT - MAX(_inserted_timestamp) _inserted_timestamp - FROM - {{ this }} - ) -{% else %} - {{ ref('bronze__streamline_fr_transactions') }} -{% endif %} - -qualify(ROW_NUMBER() over (PARTITION BY block_number -ORDER BY - _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__confirm_blocks_complete.sql b/models/streamline/silver/core/complete/streamline__confirm_blocks_complete.sql new file mode 100644 index 0000000..fe73ef2 --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__confirm_blocks_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'CONFIRM_BLOCKS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete_confirm_blocks'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + 
SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__receipts_complete.sql b/models/streamline/silver/core/complete/streamline__receipts_complete.sql new file mode 100644 index 0000000..b77138d --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__receipts_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'RECEIPTS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete_receipts'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__traces_complete.sql b/models/streamline/silver/core/complete/streamline__traces_complete.sql new file mode 100644 index 0000000..e715876 --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__traces_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'TRACES' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + 
SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/streamline/silver/core/complete/streamline__transactions_complete.sql b/models/streamline/silver/core/complete/streamline__transactions_complete.sql new file mode 100644 index 0000000..06ec7f7 --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__transactions_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'TRANSACTIONS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)"%} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/streamline/silver/core/history/streamline__blocks_history.sql b/models/streamline/silver/core/history/streamline__blocks_history.sql deleted file mode 100644 index 182893e..0000000 --- a/models/streamline/silver/core/history/streamline__blocks_history.sql +++ /dev/null @@ -1,74 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = fsc_utils.if_data_call_function_v2( - func = 'streamline.udf_bulk_rest_api_v2', - target = "{{this.schema}}.{{this.identifier}}", - params ={ "external_table" :"blocks", - "sql_limit" :"100000", - "producer_batch_size" :"100000", - "worker_batch_size" :"50000", - "sql_source" :"{{this.identifier}}" } - ), - tags = ['streamline_core_history'] -) }} - -WITH last_3_days AS ( - - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), -blocks AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - block_number <= ( - SELECT - block_number - FROM - last_3_days - ) - EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_blocks") }} - WHERE - block_number <= ( - SELECT - block_number - FROM - last_3_days - ) -) -SELECT - block_number, - ROUND( - block_number, - -3 - ) AS partition_key, - {{ target.database }}.live.udf_api( - 'POST', - '{service}/{Authentication}', - OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json' - ), - OBJECT_CONSTRUCT( - 'id', - block_number, - 'jsonrpc', - '2.0', - 
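The five *_complete models above follow one pattern: incremental runs read the realtime bronze view filtered by an _inserted_timestamp watermark, full refreshes read the corresponding _fr view, and a QUALIFY keeps one row per block_number. The full_refresh config is gated per table, so a single model can be rebuilt by passing --full-refresh together with --vars '{"TRANSACTIONS_COMPLETE_FULL_REFRESH": true}' without touching the others. For reference, an incremental run of streamline__transactions_complete compiles to roughly the SQL below; schema names are placeholders and the surrogate key is shown in its usual dbt_utils md5 form, so this is an illustration of the rendered pattern, not copied output.

SELECT
    block_number,
    file_name,
    MD5(CAST(COALESCE(CAST(block_number AS VARCHAR), '_dbt_utils_surrogate_key_null_') AS VARCHAR)) AS complete_transactions_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    _inserted_timestamp,
    'dbt-invocation-id' AS _invocation_id          -- rendered from {{ invocation_id }}
FROM
    bronze.transactions                            -- placeholder for {{ ref('bronze__transactions') }}
WHERE
    _inserted_timestamp >= (
        SELECT COALESCE(MAX(_inserted_timestamp), '1970-01-01' :: TIMESTAMP)
        FROM streamline.transactions_complete      -- placeholder for {{ this }}
    )
QUALIFY ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC) = 1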
'method', - 'eth_getBlockByNumber', - 'params', - ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)), - 'vault/prod/blast/mainnet' - ) AS request - FROM - blocks - ORDER BY - block_number ASC diff --git a/models/streamline/silver/core/history/streamline__blocks_transactions_history.sql b/models/streamline/silver/core/history/streamline__blocks_transactions_history.sql new file mode 100644 index 0000000..baed1d7 --- /dev/null +++ b/models/streamline/silver/core/history/streamline__blocks_transactions_history.sql @@ -0,0 +1,112 @@ +{# Set variables #} +{%- set model_name = 'BLOCKS_TRANSACTIONS' -%} +{%- set model_type = 'HISTORY' -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} + +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_history'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} + + EXCEPT + + SELECT block_number + FROM {{ ref("streamline__blocks_complete") }} b + INNER JOIN {{ ref("streamline__transactions_complete") }} t USING(block_number) + WHERE 1=1 + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} +), +ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git 
a/models/streamline/silver/core/history/streamline__confirm_blocks_history.sql b/models/streamline/silver/core/history/streamline__confirm_blocks_history.sql new file mode 100644 index 0000000..d715b31 --- /dev/null +++ b/models/streamline/silver/core/history/streamline__confirm_blocks_history.sql @@ -0,0 +1,133 @@ +{# Set variables #} +{%- set model_name = 'CONFIRM_BLOCKS' -%} +{%- set model_type = 'HISTORY' -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_history_confirm_blocks'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Delay blocks #} +look_back AS ( + SELECT + block_number + FROM + {{ ref("_max_block_by_hour") }} + qualify ROW_NUMBER() over ( + ORDER BY + block_number DESC + ) = 6 + ), + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + AND block_number <= (SELECT block_number FROM look_back) + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} + + EXCEPT + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + AND block_number IS NOT NULL + AND block_number <= (SELECT block_number FROM look_back) + AND _inserted_timestamp >= DATEADD( + 'day', + -4, + SYSDATE() + ) + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} +) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 
'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/history/streamline__receipts_history.sql b/models/streamline/silver/core/history/streamline__receipts_history.sql index a1ca76a..4cd5823 100644 --- a/models/streamline/silver/core/history/streamline__receipts_history.sql +++ b/models/streamline/silver/core/history/streamline__receipts_history.sql @@ -1,75 +1,113 @@ +{# Set variables #} +{%- set model_name = 'RECEIPTS' -%} +{%- set model_type = 'HISTORY' -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method +) }} + +{# Set up dbt configuration #} {{ config ( materialized = "view", post_hook = fsc_utils.if_data_call_function_v2( func = 'streamline.udf_bulk_rest_api_v2', target = "{{this.schema}}.{{this.identifier}}", - params ={ "external_table" :"receipts", - "sql_limit" :"100000", - "producer_batch_size" :"100000", - "worker_batch_size" :"50000", - "sql_source" :"{{this.identifier}}", - "exploded_key": "[\"result\"]" } + params = streamline_params ), - tags = ['streamline_core_history'] + tags = ['streamline_core_history_receipts'] ) }} -WITH last_3_days AS ( +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), -blocks AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - block_number <= ( - SELECT - block_number - FROM - last_3_days - ) EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_receipts") }} - WHERE - block_number <= ( - SELECT - block_number - FROM - last_3_days - ) + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} ) + +{# Prepare the final list of blocks to process #} +,ready_blocks 
AS ( + SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} SELECT block_number, - ROUND( - block_number, - -3 - ) AS partition_key, - {{ target.database }}.live.udf_api( + ROUND(block_number, -3) AS partition_key, + live.udf_api( 'POST', - '{service}/{Authentication}', + '{{ node_url }}', OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json' + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' ), OBJECT_CONSTRUCT( - 'id', - block_number, - 'jsonrpc', - '2.0', - 'method', - 'eth_getBlockReceipts', - 'params', - ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))), - 'vault/prod/blast/mainnet' - ) AS request - FROM - blocks - ORDER BY - block_number ASC + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/history/streamline__traces_history.sql b/models/streamline/silver/core/history/streamline__traces_history.sql index 78898e4..0cf5cc2 100644 --- a/models/streamline/silver/core/history/streamline__traces_history.sql +++ b/models/streamline/silver/core/history/streamline__traces_history.sql @@ -1,76 +1,113 @@ +{# Set variables #} +{%- set model_name = 'TRACES' -%} +{%- set model_type = 'HISTORY' -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method +) }} + +{# Set up dbt configuration #} {{ config ( materialized = "view", post_hook = fsc_utils.if_data_call_function_v2( func = 'streamline.udf_bulk_rest_api_v2', target = "{{this.schema}}.{{this.identifier}}", - params ={ "external_table" :"traces", - "sql_limit" :"100000", - "producer_batch_size" :"100000", - "worker_batch_size" :"50000", - "sql_source" :"{{this.identifier}}", - "exploded_key": "[\"result\"]" } + params = streamline_params ), tags = ['streamline_core_history'] ) }} -WITH last_3_days AS ( +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not 
new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), -blocks AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - block_number <= ( - SELECT - block_number - FROM - last_3_days - ) EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_traces") }} - WHERE - block_number <= ( - SELECT - block_number - FROM - last_3_days - ) + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} ) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} SELECT block_number, - ROUND( - block_number, - -3 - ) AS partition_key, - {{ target.database }}.live.udf_api( + ROUND(block_number, -3) AS partition_key, + live.udf_api( 'POST', - '{service}/{Authentication}', + '{{ node_url }}', OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json' + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' ), OBJECT_CONSTRUCT( - 'id', - block_number, - 'jsonrpc', - '2.0', - 'method', - 'debug_traceBlockByNumber', - 'params', - ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '30s')) + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} ), - 'vault/prod/blast/mainnet' + '{{ node_secret_path }}' ) AS request FROM - blocks -ORDER BY - block_number ASC + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/history/streamline__transactions_history.sql b/models/streamline/silver/core/history/streamline__transactions_history.sql deleted file mode 100644 index 9b33383..0000000 --- a/models/streamline/silver/core/history/streamline__transactions_history.sql +++ /dev/null @@ -1,75 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = fsc_utils.if_data_call_function_v2( - func = 'streamline.udf_bulk_rest_api_v2', - target = "{{this.schema}}.{{this.identifier}}", - params ={ "external_table" :"transactions", - "sql_limit" :"100000", - "producer_batch_size" :"100000", - "worker_batch_size" :"50000", - "sql_source" :"{{this.identifier}}", - "exploded_key": "[\"result\", \"transactions\"]" } - ), - tags = ['streamline_core_history'] -) }} - -WITH last_3_days AS ( - - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), -blocks AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - block_number <= ( - SELECT - block_number - FROM - last_3_days - ) - EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_transactions") }} - WHERE - block_number <= ( - SELECT - block_number - FROM - last_3_days - ) -) -SELECT - block_number, - ROUND( - block_number, - -3 - ) AS partition_key, - {{ target.database }}.live.udf_api( - 'POST', - '{service}/{Authentication}', - OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json' - ), - OBJECT_CONSTRUCT( - 'id', - block_number, - 'jsonrpc', - '2.0', - 'method', - 'eth_getBlockByNumber', - 'params', - ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)), - 'vault/prod/blast/mainnet' - ) AS request - FROM - blocks - 
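The history and realtime models above pass `streamline_params` straight through to streamline.udf_bulk_rest_api_v2 instead of the inline dictionaries they replace. The real set_streamline_parameters macro is added in macros/fsc_evm_temp/main_package/streamline/set_streamline_parameters.sql (not shown in this hunk) and, per the in-model comments, its values come from vars in dbt_project.yml. A sketch of the shape the models rely on, using the receipts history case with literal values copied from the deleted model; the key names 'method' and 'method_params' are read by the new models, everything else here is illustrative.

{% macro set_streamline_parameters_sketch(model_name, model_type) %}
    {# Hypothetical body: mirrors the params removed from streamline__receipts_history.sql. #}
    {% do return({
        'external_table': model_name.lower(),
        'sql_limit': 100000,
        'producer_batch_size': 100000,
        'worker_batch_size': 50000,
        'sql_source': '{{this.identifier}}',
        'exploded_key': '["result"]',
        'method': 'eth_getBlockReceipts',
        'method_params': 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))'
    }) %}
{% endmacro %}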
ORDER BY - block_number ASC diff --git a/models/streamline/silver/core/realtime/streamline__blocks_realtime.sql b/models/streamline/silver/core/realtime/streamline__blocks_realtime.sql deleted file mode 100644 index 0f17a0d..0000000 --- a/models/streamline/silver/core/realtime/streamline__blocks_realtime.sql +++ /dev/null @@ -1,82 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = fsc_utils.if_data_call_function_v2( - func = 'streamline.udf_bulk_rest_api_v2', - target = "{{this.schema}}.{{this.identifier}}", - params ={ "external_table" :"blocks", - "sql_limit" :"100000", - "producer_batch_size" :"100000", - "worker_batch_size" :"50000", - "sql_source" :"{{this.identifier}}" } - ), - tags = ['streamline_core_realtime'] -) }} - -WITH last_3_days AS ( - - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), -to_do AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - ( - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - ) - AND block_number IS NOT NULL - EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_blocks") }} - WHERE - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - AND _inserted_timestamp >= DATEADD( - 'day', - -4, - SYSDATE() - ) -) -SELECT - block_number, - ROUND( - block_number, - -3 - ) AS partition_key, - {{ target.database }}.live.udf_api( - 'POST', - '{service}/{Authentication}', - OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json' - ), - OBJECT_CONSTRUCT( - 'id', - block_number, - 'jsonrpc', - '2.0', - 'method', - 'eth_getBlockByNumber', - 'params', - ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)), - 'vault/prod/blast/mainnet' - ) AS request - FROM - to_do - ORDER BY - partition_key ASC diff --git a/models/streamline/silver/core/realtime/streamline__blocks_transactions_realtime.sql b/models/streamline/silver/core/realtime/streamline__blocks_transactions_realtime.sql new file mode 100644 index 0000000..0bd0a93 --- /dev/null +++ b/models/streamline/silver/core/realtime/streamline__blocks_transactions_realtime.sql @@ -0,0 +1,126 @@ +{# Set variables #} +{%- set model_name = 'BLOCKS_TRANSACTIONS' -%} +{%- set model_type = 'REALTIME' -%} +{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. 
These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method, + min_block=min_block +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_realtime'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} + + {% if min_block is not none %} + AND block_number >= {{ min_block }} + {% endif %} + + EXCEPT + + SELECT block_number + FROM {{ ref("streamline__blocks_complete") }} b + INNER JOIN {{ ref("streamline__transactions_complete") }} t USING(block_number) + WHERE 1=1 + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} +), +ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if not new_build %} + UNION + SELECT block_number + FROM {{ ref("_unconfirmed_blocks") }} + UNION + SELECT block_number + FROM {{ ref("_missing_txs") }} + {% endif %} + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/realtime/streamline__confirm_blocks_realtime.sql b/models/streamline/silver/core/realtime/streamline__confirm_blocks_realtime.sql index e41b698..6135a3f 100644 --- a/models/streamline/silver/core/realtime/streamline__confirm_blocks_realtime.sql +++ b/models/streamline/silver/core/realtime/streamline__confirm_blocks_realtime.sql @@ -1,24 +1,63 @@ +{# Set variables #} +{%- set model_name = 'CONFIRM_BLOCKS' -%} +{%- set model_type = 'REALTIME' -%} +{%- set min_block = 
var('GLOBAL_START_UP_BLOCK', none) -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method, + min_block=min_block +) }} + +{# Set up dbt configuration #} {{ config ( materialized = "view", post_hook = fsc_utils.if_data_call_function_v2( func = 'streamline.udf_bulk_rest_api_v2', target = "{{this.schema}}.{{this.identifier}}", - params ={ "external_table" :"confirm_blocks", - "sql_limit" :"100000", - "producer_batch_size" :"100000", - "worker_batch_size" :"50000", - "sql_source" :"{{this.identifier}}" } + params = streamline_params ), - tags = ['streamline_core_realtime'] + tags = ['streamline_core_realtime_confirm_blocks'] ) }} -WITH last_3_days AS ( +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), +{# Delay blocks #} look_back AS ( SELECT block_number @@ -28,76 +67,72 @@ look_back AS ( ORDER BY block_number DESC ) = 6 -), -tbl AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE + ), + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE block_number IS NOT NULL - AND block_number <= ( - SELECT - block_number - FROM - look_back - ) - AND block_number >= ( - SELECT - block_number - FROM - last_3_days - ) + AND block_number <= (SELECT block_number FROM look_back) + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} + {% if min_block is not none %} + AND block_number >= {{ min_block }} + {% endif %} + EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_confirmed_blocks") }} - WHERE - block_number IS NOT NULL - AND block_number <= ( - SELECT - block_number - FROM - look_back - ) + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + AND block_number IS NOT NULL + AND block_number <= (SELECT block_number FROM look_back) AND _inserted_timestamp >= DATEADD( 'day', -4, SYSDATE() ) - AND block_number >= ( - SELECT - block_number - FROM - last_3_days - ) + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} ) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + 
SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} SELECT block_number, - ROUND( - block_number, - -3 - ) AS partition_key, - {{ target.database }}.live.udf_api( + ROUND(block_number, -3) AS partition_key, + live.udf_api( 'POST', - '{service}/{Authentication}', + '{{ node_url }}', OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json' + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' ), OBJECT_CONSTRUCT( - 'id', - block_number, - 'jsonrpc', - '2.0', - 'method', - 'eth_getBlockByNumber', - 'params', - ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)), - 'vault/prod/blast/mainnet' - ) AS request - FROM - tbl - ORDER BY - block_number ASC + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/realtime/streamline__receipts_realtime.sql b/models/streamline/silver/core/realtime/streamline__receipts_realtime.sql index 3814d56..6ec5c85 100644 --- a/models/streamline/silver/core/realtime/streamline__receipts_realtime.sql +++ b/models/streamline/silver/core/realtime/streamline__receipts_realtime.sql @@ -1,109 +1,130 @@ +{# Set variables #} +{%- set model_name = 'RECEIPTS' -%} +{%- set model_type = 'REALTIME' -%} +{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method, + min_block=min_block +) }} + +{# Set up dbt configuration #} {{ config ( materialized = "view", post_hook = fsc_utils.if_data_call_function_v2( func = 'streamline.udf_bulk_rest_api_v2', target = "{{this.schema}}.{{this.identifier}}", - params ={ "external_table" :"receipts", - "sql_limit" :"100000", - "producer_batch_size" :"100000", - "worker_batch_size" :"50000", - "sql_source" :"{{this.identifier}}", - "exploded_key": "[\"result\"]" } + params = streamline_params ), - tags = ['streamline_core_realtime'] + tags = ['streamline_core_realtime_receipts'] ) }} -WITH last_3_days AS ( +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} - SELECT - 
block_number - FROM - {{ ref("_block_lookback") }} -), +{# Identify blocks that need processing #} to_do AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - ( - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - ) - AND block_number IS NOT NULL + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} + {% if min_block is not none %} + AND block_number >= {{ min_block }} + {% endif %} + EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_receipts") }} - WHERE - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - AND _inserted_timestamp >= DATEADD( - 'day', - -4, - SYSDATE() - ) -), -ready_blocks AS ( - SELECT - block_number - FROM - to_do - UNION - SELECT - block_number - FROM - ( - SELECT - block_number - FROM - {{ ref("_missing_receipts") }} - UNION - SELECT - block_number - FROM - {{ ref("_missing_txs") }} - UNION - SELECT - block_number - FROM - {{ ref("_unconfirmed_blocks") }} - ) + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} ) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if not new_build %} + UNION + SELECT block_number + FROM {{ ref("_unconfirmed_blocks") }} + UNION + SELECT block_number + FROM {{ ref("_missing_txs") }} + UNION + SELECT block_number + FROM {{ ref("_missing_receipts") }} + {% endif %} + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} SELECT block_number, - ROUND( - block_number, - -3 - ) AS partition_key, - {{ target.database }}.live.udf_api( + ROUND(block_number, -3) AS partition_key, + live.udf_api( 'POST', - '{service}/{Authentication}', + '{{ node_url }}', OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json' + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' ), OBJECT_CONSTRUCT( - 'id', - block_number, - 'jsonrpc', - '2.0', - 'method', - 'eth_getBlockReceipts', - 'params', - ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))), - 'vault/prod/blast/mainnet' - ) AS request - FROM - ready_blocks - ORDER BY - block_number ASC + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/realtime/streamline__traces_realtime.sql b/models/streamline/silver/core/realtime/streamline__traces_realtime.sql index 8006434..34f7f98 100644 --- a/models/streamline/silver/core/realtime/streamline__traces_realtime.sql +++ b/models/streamline/silver/core/realtime/streamline__traces_realtime.sql @@ -1,105 +1,127 @@ +{# Set variables #} +{%- set model_name = 'TRACES' -%} +{%- set model_type = 'REALTIME' -%} +{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. 
These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method, + min_block=min_block +) }} + +{# Set up dbt configuration #} {{ config ( materialized = "view", post_hook = fsc_utils.if_data_call_function_v2( func = 'streamline.udf_bulk_rest_api_v2', target = "{{this.schema}}.{{this.identifier}}", - params ={ "external_table" :"traces", - "sql_limit" :"100000", - "producer_batch_size" :"100000", - "worker_batch_size" :"50000", - "sql_source" :"{{this.identifier}}", - "exploded_key": "[\"result\"]" } + params = streamline_params ), tags = ['streamline_core_realtime'] ) }} -WITH last_3_days AS ( +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), +{# Identify blocks that need processing #} to_do AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - ( - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - ) - AND block_number IS NOT NULL + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} + {% if min_block is not none %} + AND block_number >= {{ min_block }} + {% endif %} + EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_traces") }} - WHERE - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - AND _inserted_timestamp >= DATEADD( - 'day', - -4, - SYSDATE() - ) -), -ready_blocks AS ( - SELECT - block_number - FROM - to_do - UNION - SELECT - block_number - FROM - ( - SELECT - block_number - FROM - {{ ref("_missing_traces") }} - UNION - SELECT - block_number - FROM - {{ ref("_unconfirmed_blocks") }} - ) + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} ) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if not new_build %} + UNION + SELECT block_number + FROM {{ ref("_unconfirmed_blocks") }} + UNION + SELECT block_number + FROM {{ ref("_missing_traces") }} + {% endif %} + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} SELECT block_number, - ROUND( - block_number, - -3 - ) 
AS partition_key, - {{ target.database }}.live.udf_api( + ROUND(block_number, -3) AS partition_key, + live.udf_api( 'POST', - '{service}/{Authentication}', + '{{ node_url }}', OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json' + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' ), OBJECT_CONSTRUCT( - 'id', - block_number, - 'jsonrpc', - '2.0', - 'method', - 'debug_traceBlockByNumber', - 'params', - ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '30s')) + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} ), - 'vault/prod/blast/mainnet' + '{{ node_secret_path }}' ) AS request FROM ready_blocks -ORDER BY - block_number ASC + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/realtime/streamline__transactions_realtime.sql b/models/streamline/silver/core/realtime/streamline__transactions_realtime.sql deleted file mode 100644 index 472b684..0000000 --- a/models/streamline/silver/core/realtime/streamline__transactions_realtime.sql +++ /dev/null @@ -1,104 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = fsc_utils.if_data_call_function_v2( - func = 'streamline.udf_bulk_rest_api_v2', - target = "{{this.schema}}.{{this.identifier}}", - params ={ "external_table" :"transactions", - "sql_limit" :"100000", - "producer_batch_size" :"100000", - "worker_batch_size" :"50000", - "sql_source" :"{{this.identifier}}", - "exploded_key": "[\"result\", \"transactions\"]" } - ), - tags = ['streamline_core_realtime'] -) }} - -WITH last_3_days AS ( - - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), -to_do AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - ( - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - ) - AND block_number IS NOT NULL - EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_transactions") }} - WHERE - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - AND _inserted_timestamp >= DATEADD( - 'day', - -4, - SYSDATE() - ) -), -ready_blocks AS ( - SELECT - block_number - FROM - to_do - UNION - SELECT - block_number - FROM - ( - SELECT - block_number - FROM - {{ ref("_missing_txs") }} - UNION - SELECT - block_number - FROM - {{ ref("_unconfirmed_blocks") }} - ) -) -SELECT - block_number, - ROUND( - block_number, - -3 - ) AS partition_key, - {{ target.database }}.live.udf_api( - 'POST', - '{service}/{Authentication}', - OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json' - ), - OBJECT_CONSTRUCT( - 'id', - block_number, - 'jsonrpc', - '2.0', - 'method', - 'eth_getBlockByNumber', - 'params', - ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)), - 'vault/prod/blast/mainnet' - ) AS request - FROM - ready_blocks - ORDER BY - block_number ASC diff --git a/models/streamline/silver/core/retry/_missing_traces.sql b/models/streamline/silver/core/retry/_missing_traces.sql index f4ea89d..346f556 100644 --- a/models/streamline/silver/core/retry/_missing_traces.sql +++ b/models/streamline/silver/core/retry/_missing_traces.sql @@ -2,30 +2,16 @@ materialized = "ephemeral" ) }} -WITH lookback AS ( - - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -) SELECT - DISTINCT tx.block_number block_number + DISTINCT tx.block_number FROM - {{ ref("silver__transactions") }} + {{ ref("test_silver__transactions_recent") }} tx - LEFT JOIN {{ ref("core__fact_traces") }} - tr - ON 
tx.block_number = tr.block_number - AND tx.tx_hash = tr.tx_hash -WHERE - tx.block_timestamp >= DATEADD('hour', -84, SYSDATE()) - AND tr.tx_hash IS NULL - AND tx.block_number >= ( - SELECT - block_number - FROM - lookback + LEFT JOIN {{ ref("test_gold__fact_traces_recent") }} + tr USING ( + block_number, + tx_hash ) - AND tr.block_timestamp >= DATEADD('hour', -84, SYSDATE()) - AND tr.block_timestamp IS NOT NULL +WHERE + tr.tx_hash IS NULL + AND tx.block_timestamp > DATEADD('day', -5, SYSDATE()) diff --git a/models/streamline/silver/core/streamline__blocks.sql b/models/streamline/silver/core/streamline__blocks.sql index ffa8e8e..9e56c76 100644 --- a/models/streamline/silver/core/streamline__blocks.sql +++ b/models/streamline/silver/core/streamline__blocks.sql @@ -1,17 +1,29 @@ +{%- if flags.WHICH == 'compile' and execute -%} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%} + {% set config_log = config_log ~ '\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} +{%- endif -%} + {{ config ( materialized = "view", tags = ['streamline_core_complete'] ) }} SELECT - _id AS block_number, - REPLACE( - concat_ws('', '0x', to_char(block_number, 'XXXXXXXX')), - ' ', - '' - ) AS block_number_hex + _id, + ( + ({{ var('GLOBAL_BLOCKS_PER_HOUR',0) }} / 60) * {{ var('GLOBAL_CHAINHEAD_DELAY',3) }} + ) :: INT AS block_number_delay, --minute-based block delay + (_id - block_number_delay) :: INT AS block_number, + utils.udf_int_to_hex(block_number) AS block_number_hex FROM - {{ ref("silver__number_sequence") }} + {{ ref('silver__number_sequence') }} WHERE _id <= ( SELECT @@ -20,7 +32,5 @@ WHERE 0 ) FROM - {{ref("streamline__get_chainhead")}} - ) -ORDER BY - _id ASC + {{ ref("streamline__get_chainhead") }} + ) \ No newline at end of file diff --git a/models/streamline/silver/core/streamline__get_chainhead.sql b/models/streamline/silver/core/streamline__get_chainhead.sql index 7be0901..9e98ae7 100644 --- a/models/streamline/silver/core/streamline__get_chainhead.sql +++ b/models/streamline/silver/core/streamline__get_chainhead.sql @@ -1,21 +1,45 @@ +{%- set model_quantum_state = var('CHAINHEAD_QUANTUM_STATE', 'livequery') -%} + +{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%} + +{%- if flags.WHICH == 'compile' and execute -%} + + {{ log("=== Current Variable Settings ===", info=True) }} + {{ log("CHAINHEAD_QUANTUM_STATE: " ~ model_quantum_state, info=True) }} + {{ log("", info=True) }} + + {{ log("=== API Details ===", info=True) }} + {{ log("NODE_URL: " ~ node_url, info=True) }} + {{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }} + {{ log("", info=True) }} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%} + {% set config_log = config_log ~ '\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} + +{%- endif -%} + {{ config ( materialized = 'table', - tags = ['streamline_core_complete'] + tags = ['streamline_core_complete','chainhead'] ) }} SELECT - {{ 
target.database }}.live.udf_api( + live.udf_api( 'POST', - '{service}/{Authentication}', + '{{ node_url }}', OBJECT_CONSTRUCT( - 'Content-Type', - 'application/json', - 'fsc-quantum-state', - 'livequery' + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' ), OBJECT_CONSTRUCT( 'id', - 1, + 0, 'jsonrpc', '2.0', 'method', @@ -23,8 +47,8 @@ SELECT 'params', [] ), - 'vault/prod/blast/mainnet' + '{{ var('GLOBAL_NODE_SECRET_PATH') }}' ) AS resp, utils.udf_hex_to_int( resp :data :result :: STRING - ) AS block_number + ) AS block_number \ No newline at end of file diff --git a/models/streamline/silver/core/streamline__get_chainhead.yml b/models/streamline/silver/core/streamline__get_chainhead.yml new file mode 100644 index 0000000..e318e4d --- /dev/null +++ b/models/streamline/silver/core/streamline__get_chainhead.yml @@ -0,0 +1,9 @@ +version: 2 +models: + - name: streamline__get_chainhead + description: "This model is used to get the chainhead from the blockchain." + + columns: + - name: BLOCK_NUMBER + tests: + - not_null \ No newline at end of file diff --git a/models/streamline/silver/decoded_logs/complete/streamline__decoded_logs_complete.sql b/models/streamline/silver/decoded_logs/complete/streamline__decoded_logs_complete.sql new file mode 100644 index 0000000..3e80589 --- /dev/null +++ b/models/streamline/silver/decoded_logs/complete/streamline__decoded_logs_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'DECODED_LOGS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(_log_id)" %} + +{# Log configuration details #} +{{ log_model_details() }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "_log_id", + cluster_by = "ROUND(block_number, -3)", + incremental_predicates = ["dynamic_range", "block_number"], + merge_update_columns = ["_log_id"], + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_decoded_logs_complete'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + id AS _log_id, + {{ dbt_utils.generate_surrogate_key(['id']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY id ORDER BY _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/decoded_logs/realtime/streamline__decoded_logs_realtime.sql b/models/streamline/silver/decoded_logs/realtime/streamline__decoded_logs_realtime.sql new file mode 100644 index 0000000..a0ff21b --- /dev/null +++ b/models/streamline/silver/decoded_logs/realtime/streamline__decoded_logs_realtime.sql @@ -0,0 +1,110 @@ +{%- set testing_limit = var('DECODED_LOGS_REALTIME_TESTING_LIMIT', none) -%} + +{%- set streamline_params = { + "external_table": var("DECODED_LOGS_REALTIME_EXTERNAL_TABLE", "decoded_logs"), + "sql_limit": var("DECODED_LOGS_REALTIME_SQL_LIMIT", 10000000), + 
"producer_batch_size": var("DECODED_LOGS_REALTIME_PRODUCER_BATCH_SIZE", 400000), + "worker_batch_size": var("DECODED_LOGS_REALTIME_WORKER_BATCH_SIZE", 200000), + "sql_source": "decoded_logs_realtime" +} -%} + +{# Log configuration details #} +{{ log_model_details( + params = streamline_params +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = [fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_decode_logs_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = { + "external_table": streamline_params['external_table'], + "sql_limit": streamline_params['sql_limit'], + "producer_batch_size": streamline_params['producer_batch_size'], + "worker_batch_size": streamline_params['worker_batch_size'], + "sql_source": streamline_params['sql_source'] + } + ), + fsc_utils.if_data_call_wait()], + tags = ['streamline_decoded_logs_realtime'] +) }} + +WITH target_blocks AS ( + SELECT + block_number + FROM + {{ ref('core__fact_blocks') }} + WHERE + block_number >= ( + SELECT + block_number + FROM + {{ ref('_24_hour_lookback') }} + ) +), +existing_logs_to_exclude AS ( + SELECT + _log_id + FROM + {{ ref('streamline__decoded_logs_complete') }} + l + INNER JOIN target_blocks b USING (block_number) + WHERE + l.inserted_timestamp :: DATE >= DATEADD('day', -2, SYSDATE()) +), +candidate_logs AS ( + SELECT + l.block_number, + l.tx_hash, + l.event_index, + l.contract_address, + l.topics, + l.data, + CONCAT( + l.tx_hash :: STRING, + '-', + l.event_index :: STRING + ) AS _log_id + FROM + target_blocks b + INNER JOIN {{ ref('core__fact_event_logs') }} + l USING (block_number) + WHERE + l.tx_succeeded + AND l.inserted_timestamp :: DATE >= DATEADD('day', -2, SYSDATE()) +) +SELECT + l.block_number, + l._log_id, + A.abi, + OBJECT_CONSTRUCT( + 'topics', + l.topics, + 'data', + l.data, + 'address', + l.contract_address + ) AS DATA +FROM + candidate_logs l + INNER JOIN {{ ref('silver__complete_event_abis') }} A + ON A.parent_contract_address = l.contract_address + AND A.event_signature = l.topics [0] :: STRING + AND l.block_number BETWEEN A.start_block + AND A.end_block +WHERE + NOT EXISTS ( + SELECT + 1 + FROM + existing_logs_to_exclude e + WHERE + e._log_id = l._log_id + ) + +{% if testing_limit is not none %} + LIMIT + {{ testing_limit }} +{% endif %} \ No newline at end of file diff --git a/models/streamline/silver/decoder/complete/streamline__complete_decode_logs.sql b/models/streamline/silver/decoder/complete/streamline__complete_decode_logs.sql deleted file mode 100644 index 4130953..0000000 --- a/models/streamline/silver/decoder/complete/streamline__complete_decode_logs.sql +++ /dev/null @@ -1,32 +0,0 @@ --- depends_on: {{ ref('bronze__decoded_logs') }} -{{ config ( - materialized = "incremental", - unique_key = "_log_id", - cluster_by = "ROUND(block_number, -3)", - incremental_predicates = ["dynamic_range", "block_number"], - merge_update_columns = ["_log_id"], - post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(_log_id)", - tags = ['streamline_decoded_logs_complete'] -) }} - -SELECT - block_number, - id AS _log_id, - _inserted_timestamp -FROM - -{% if is_incremental() %} -{{ ref('bronze__decoded_logs') }} -WHERE - TO_TIMESTAMP_NTZ(_inserted_timestamp) >= ( - SELECT - COALESCE(MAX(TO_TIMESTAMP_NTZ(_inserted_timestamp)), '1970-01-01 00:00:00') _inserted_timestamp - FROM - {{ this }}) - {% else %} - {{ ref('bronze__fr_decoded_logs') }} - {% endif %} - - qualify(ROW_NUMBER() over (PARTITION BY id - ORDER BY - _inserted_timestamp 
DESC)) = 1 diff --git a/models/streamline/silver/decoder/realtime/streamline__decode_logs_realtime.sql b/models/streamline/silver/decoder/realtime/streamline__decode_logs_realtime.sql deleted file mode 100644 index 0df4388..0000000 --- a/models/streamline/silver/decoder/realtime/streamline__decode_logs_realtime.sql +++ /dev/null @@ -1,84 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = [if_data_call_function( - func = "{{this.schema}}.udf_bulk_decode_logs(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'DECODED_LOGS', 'sql_limit', {{var('sql_limit','2000000')}}, 'producer_batch_size', {{var('producer_batch_size','400000')}}, 'worker_batch_size', {{var('worker_batch_size','200000')}}))", - target = "{{this.schema}}.{{this.identifier}}" - ), - "call system$wait(" ~ var("WAIT", 400) ~ ")" ], - tags = ['streamline_decoded_logs_realtime'] -) }} - -WITH target_blocks AS ( - - SELECT - block_number - FROM - {{ ref('core__fact_blocks') }} - WHERE - block_number >= ( - SELECT - block_number - FROM - {{ ref("_block_lookback") }} - ) -), -existing_logs_to_exclude AS ( - SELECT - _log_id - FROM - {{ ref('streamline__complete_decode_logs') }} - l - INNER JOIN target_blocks b USING (block_number) - WHERE - l._inserted_timestamp :: DATE >= DATEADD('day', -5, SYSDATE()) -), -candidate_logs AS ( - SELECT - l.block_number, - l.tx_hash, - l.event_index, - l.contract_address, - l.topics, - l.data, - CONCAT( - l.tx_hash :: STRING, - '-', - l.event_index :: STRING - ) AS _log_id - FROM - target_blocks b - INNER JOIN {{ ref('core__fact_event_logs') }} - l USING (block_number) - WHERE - l.tx_succeeded - AND l.inserted_timestamp :: DATE >= DATEADD('day', -5, SYSDATE()) -) -SELECT - l.block_number, - l._log_id, - A.abi AS abi, - OBJECT_CONSTRUCT( - 'topics', - l.topics, - 'data', - l.data, - 'address', - l.contract_address - ) AS DATA -FROM - candidate_logs l - INNER JOIN {{ ref('silver__complete_event_abis') }} A - ON A.parent_contract_address = l.contract_address - AND A.event_signature = l.topics [0] :: STRING - AND l.block_number BETWEEN A.start_block - AND A.end_block -WHERE - NOT EXISTS ( - SELECT - 1 - FROM - existing_logs_to_exclude e - WHERE - e._log_id = l._log_id - ) -limit 7500000 \ No newline at end of file diff --git a/package-lock.yml b/package-lock.yml deleted file mode 100644 index ee9ea8a..0000000 --- a/package-lock.yml +++ /dev/null @@ -1,18 +0,0 @@ -packages: -- package: calogica/dbt_expectations - version: 0.8.2 -- package: dbt-labs/dbt_external_tables - version: 0.8.2 -- package: dbt-labs/dbt_utils - version: 1.0.0 -- git: https://github.com/FlipsideCrypto/fsc-utils.git - revision: eb33ac727af26ebc8a8cc9711d4a6ebc3790a107 -- package: get-select/dbt_snowflake_query_tags - version: 2.5.0 -- git: https://github.com/FlipsideCrypto/fsc-evm.git - revision: ec6adae14ab4060ad4a553fb7f32d7e57693996d -- package: calogica/dbt_date - version: 0.7.2 -- git: https://github.com/FlipsideCrypto/livequery-models.git - revision: b024188be4e9c6bc00ed77797ebdc92d351d620e -sha1_hash: 622a679ecf98e6ebf3c904241902ce5328c77e52 diff --git a/packages.yml b/packages.yml index 4fb6364..b812b15 100644 --- a/packages.yml +++ b/packages.yml @@ -6,8 +6,6 @@ packages: - package: dbt-labs/dbt_utils version: 1.0.0 - git: https://github.com/FlipsideCrypto/fsc-utils.git - revision: v1.29.0 + revision: v1.31.0 - package: get-select/dbt_snowflake_query_tags - version: [">=2.0.0", "<3.0.0"] - - git: https://github.com/FlipsideCrypto/fsc-evm.git - revision: v1.5.0 \ No newline at end of file + 
version: [">=2.0.0", "<3.0.0"] \ No newline at end of file