From 3f103dd94f2ee4d163cbd9ef290e39eddb873397 Mon Sep 17 00:00:00 2001 From: drethereum <71602799+drethereum@users.noreply.github.com> Date: Tue, 18 Feb 2025 13:57:28 -0700 Subject: [PATCH] AN-5774/sl2-migration-op (#331) * initial build * Decoder changes * dbt project * global node url * testing * comments * removed macro * prod integration --- .../dbt_run_streamline_chainhead.yml | 6 +- .../dbt_run_streamline_history_adhoc.yml | 2 +- .gitignore | 4 +- dbt_project.yml | 82 +++++++- macros/decoder/decoded_logs_history.sql | 34 ++-- macros/decoder/run_decoded_logs_history.sql | 6 +- ...eamline_external_table_queries_decoder.sql | 101 ++++++++++ .../streamline_external_table_queries.sql | 141 +++++++++++++ .../main_package/logging/bronze.sql | 36 ++++ .../main_package/logging/complete.sql | 29 +++ .../main_package/logging/logging.sql | 36 ++++ .../main_package/logging/requests.sql | 55 +++++ .../streamline/set_default_variables.sql | 47 +++++ .../streamline/set_streamline_parameters.sql | 57 ++++++ macros/streamline/models.sql | 119 ----------- .../bronze__potential_overflowed_traces.sql | 2 +- models/silver/core/silver__blocks.sql | 6 +- .../silver/core/silver__confirmed_blocks.sql | 6 +- models/silver/core/silver__decoded_logs.sql | 2 +- models/silver/core/silver__receipts.sql | 6 +- models/silver/core/silver__traces.sql | 14 +- models/silver/core/silver__transactions.sql | 6 +- .../traces2_fix/silver__fact_traces2_fix.sql | 163 --------------- models/sources.yml | 10 +- .../streamline/bronze/core/bronze__blocks.sql | 39 ++++ .../bronze/core/bronze__blocks_fr.sql | 41 ++++ .../bronze/core/bronze__blocks_fr_v1.sql | 40 ++++ .../bronze/core/bronze__blocks_fr_v2.sql | 40 ++++ .../bronze/core/bronze__confirm_blocks.sql | 39 ++++ .../bronze/core/bronze__confirm_blocks_fr.sql | 28 +++ .../core/bronze__confirm_blocks_fr_v1.sql | 40 ++++ .../core/bronze__confirm_blocks_fr_v2.sql | 40 ++++ .../bronze/core/bronze__receipts.sql | 39 ++++ .../bronze/core/bronze__receipts_fr.sql | 38 ++++ .../bronze/core/bronze__receipts_fr_v1.sql | 40 ++++ .../bronze/core/bronze__receipts_fr_v2.sql | 40 ++++ .../bronze/core/bronze__streamline_blocks.sql | 11 - .../bronze__streamline_confirm_blocks.sql | 9 - .../core/bronze__streamline_receipts.sql | 11 - .../bronze/core/bronze__streamline_traces.sql | 10 - .../core/bronze__streamline_transactions.sql | 11 - .../streamline/bronze/core/bronze__traces.sql | 39 ++++ .../bronze/core/bronze__traces_fr.sql | 30 +++ .../bronze/core/bronze__traces_fr_v1.sql | 40 ++++ .../bronze/core/bronze__traces_fr_v2.sql | 40 ++++ .../bronze/core/bronze__transactions.sql | 39 ++++ .../bronze/core/bronze__transactions_fr.sql | 41 ++++ .../core/bronze__transactions_fr_v1.sql | 40 ++++ .../core/bronze__transactions_fr_v2.sql | 40 ++++ .../core/fr/bronze__streamline_fr_blocks.sql | 11 - .../bronze__streamline_fr_confirm_blocks.sql | 9 - .../fr/bronze__streamline_fr_receipts.sql | 11 - .../core/fr/bronze__streamline_fr_traces.sql | 9 - .../fr/bronze__streamline_fr_transactions.sql | 11 - .../bronze/decoder/bronze__decoded_logs.sql | 58 ++---- .../decoder/bronze__decoded_logs_fr.sql | 20 ++ .../decoder/bronze__decoded_logs_fr_v1.sql | 23 +++ .../decoder/bronze__decoded_logs_fr_v2.sql | 23 +++ .../decoder/bronze__fr_decoded_logs.sql | 40 ---- .../streamline/bronze/streamline-sbx-setup.md | 106 ---------- .../streamline/silver/_max_block_by_hour.sql | 2 +- .../complete/streamline__blocks_complete.sql | 50 +++++ .../streamline__complete_confirmed_blocks.sql | 29 --- 
...ine__complete_debug_traceBlockByNumber.sql | 33 --- ...line__complete_qn_getBlockWithReceipts.sql | 32 --- .../streamline__confirm_blocks_complete.sql | 50 +++++ .../streamline__receipts_complete.sql | 50 +++++ .../complete/streamline__traces_complete.sql | 50 +++++ .../streamline__transactions_complete.sql | 50 +++++ ...treamline__blocks_transactions_history.sql | 112 +++++++++++ .../streamline__confirm_blocks_history.sql | 133 ++++++++++++ ...line__debug_traceBlockByNumber_history.sql | 48 ----- ...mline__qn_getBlockWithReceipts_history.sql | 47 ----- .../history/streamline__receipts_history.sql | 113 +++++++++++ .../history/streamline__traces_history.sql | 113 +++++++++++ ...reamline__blocks_transactions_realtime.sql | 126 ++++++++++++ .../streamline__confirm_blocks_realtime.sql | 190 +++++++++++------- ...ine__debug_traceBlockByNumber_realtime.sql | 100 --------- ...line__qn_getBlockWithReceipts_realtime.sql | 104 ---------- .../streamline__receipts_realtime.sql | 130 ++++++++++++ .../realtime/streamline__traces_realtime.sql | 127 ++++++++++++ .../silver/core/retry/_missing_traces.sql | 32 +-- .../silver/core/streamline__blocks.sql | 40 ++-- .../silver/core/streamline__get_chainhead.sql | 54 +++++ .../silver/core/streamline__get_chainhead.yml | 9 + .../streamline__decoded_logs_complete.sql | 50 +++++ .../streamline__decoded_logs_realtime.sql | 110 ++++++++++ .../streamline__complete_decode_logs.sql | 32 --- .../streamline__decode_logs_realtime.sql | 80 -------- package-lock.yml | 18 -- packages.yml | 2 +- 91 files changed, 2977 insertions(+), 1255 deletions(-) create mode 100644 macros/fsc_evm_temp/decoder_package/streamline_external_table_queries_decoder.sql create mode 100644 macros/fsc_evm_temp/main_package/bronze/streamline_external_table_queries.sql create mode 100644 macros/fsc_evm_temp/main_package/logging/bronze.sql create mode 100644 macros/fsc_evm_temp/main_package/logging/complete.sql create mode 100644 macros/fsc_evm_temp/main_package/logging/logging.sql create mode 100644 macros/fsc_evm_temp/main_package/logging/requests.sql create mode 100644 macros/fsc_evm_temp/main_package/streamline/set_default_variables.sql create mode 100644 macros/fsc_evm_temp/main_package/streamline/set_streamline_parameters.sql delete mode 100644 macros/streamline/models.sql delete mode 100644 models/silver/core/traces2_fix/silver__fact_traces2_fix.sql create mode 100644 models/streamline/bronze/core/bronze__blocks.sql create mode 100644 models/streamline/bronze/core/bronze__blocks_fr.sql create mode 100644 models/streamline/bronze/core/bronze__blocks_fr_v1.sql create mode 100644 models/streamline/bronze/core/bronze__blocks_fr_v2.sql create mode 100644 models/streamline/bronze/core/bronze__confirm_blocks.sql create mode 100644 models/streamline/bronze/core/bronze__confirm_blocks_fr.sql create mode 100644 models/streamline/bronze/core/bronze__confirm_blocks_fr_v1.sql create mode 100644 models/streamline/bronze/core/bronze__confirm_blocks_fr_v2.sql create mode 100644 models/streamline/bronze/core/bronze__receipts.sql create mode 100644 models/streamline/bronze/core/bronze__receipts_fr.sql create mode 100644 models/streamline/bronze/core/bronze__receipts_fr_v1.sql create mode 100644 models/streamline/bronze/core/bronze__receipts_fr_v2.sql delete mode 100644 models/streamline/bronze/core/bronze__streamline_blocks.sql delete mode 100644 models/streamline/bronze/core/bronze__streamline_confirm_blocks.sql delete mode 100644 models/streamline/bronze/core/bronze__streamline_receipts.sql delete mode 
100644 models/streamline/bronze/core/bronze__streamline_traces.sql delete mode 100644 models/streamline/bronze/core/bronze__streamline_transactions.sql create mode 100644 models/streamline/bronze/core/bronze__traces.sql create mode 100644 models/streamline/bronze/core/bronze__traces_fr.sql create mode 100644 models/streamline/bronze/core/bronze__traces_fr_v1.sql create mode 100644 models/streamline/bronze/core/bronze__traces_fr_v2.sql create mode 100644 models/streamline/bronze/core/bronze__transactions.sql create mode 100644 models/streamline/bronze/core/bronze__transactions_fr.sql create mode 100644 models/streamline/bronze/core/bronze__transactions_fr_v1.sql create mode 100644 models/streamline/bronze/core/bronze__transactions_fr_v2.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_blocks.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_confirm_blocks.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_receipts.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_traces.sql delete mode 100644 models/streamline/bronze/core/fr/bronze__streamline_fr_transactions.sql create mode 100644 models/streamline/bronze/decoder/bronze__decoded_logs_fr.sql create mode 100644 models/streamline/bronze/decoder/bronze__decoded_logs_fr_v1.sql create mode 100644 models/streamline/bronze/decoder/bronze__decoded_logs_fr_v2.sql delete mode 100644 models/streamline/bronze/decoder/bronze__fr_decoded_logs.sql delete mode 100644 models/streamline/bronze/streamline-sbx-setup.md create mode 100644 models/streamline/silver/core/complete/streamline__blocks_complete.sql delete mode 100644 models/streamline/silver/core/complete/streamline__complete_confirmed_blocks.sql delete mode 100644 models/streamline/silver/core/complete/streamline__complete_debug_traceBlockByNumber.sql delete mode 100644 models/streamline/silver/core/complete/streamline__complete_qn_getBlockWithReceipts.sql create mode 100644 models/streamline/silver/core/complete/streamline__confirm_blocks_complete.sql create mode 100644 models/streamline/silver/core/complete/streamline__receipts_complete.sql create mode 100644 models/streamline/silver/core/complete/streamline__traces_complete.sql create mode 100644 models/streamline/silver/core/complete/streamline__transactions_complete.sql create mode 100644 models/streamline/silver/core/history/streamline__blocks_transactions_history.sql create mode 100644 models/streamline/silver/core/history/streamline__confirm_blocks_history.sql delete mode 100644 models/streamline/silver/core/history/streamline__debug_traceBlockByNumber_history.sql delete mode 100644 models/streamline/silver/core/history/streamline__qn_getBlockWithReceipts_history.sql create mode 100644 models/streamline/silver/core/history/streamline__receipts_history.sql create mode 100644 models/streamline/silver/core/history/streamline__traces_history.sql create mode 100644 models/streamline/silver/core/realtime/streamline__blocks_transactions_realtime.sql delete mode 100644 models/streamline/silver/core/realtime/streamline__debug_traceBlockByNumber_realtime.sql delete mode 100644 models/streamline/silver/core/realtime/streamline__qn_getBlockWithReceipts_realtime.sql create mode 100644 models/streamline/silver/core/realtime/streamline__receipts_realtime.sql create mode 100644 models/streamline/silver/core/realtime/streamline__traces_realtime.sql create mode 100644 models/streamline/silver/core/streamline__get_chainhead.sql create mode 100644 
models/streamline/silver/core/streamline__get_chainhead.yml create mode 100644 models/streamline/silver/decoded_logs/complete/streamline__decoded_logs_complete.sql create mode 100644 models/streamline/silver/decoded_logs/realtime/streamline__decoded_logs_realtime.sql delete mode 100644 models/streamline/silver/decoder/complete/streamline__complete_decode_logs.sql delete mode 100644 models/streamline/silver/decoder/realtime/streamline__decode_logs_realtime.sql delete mode 100644 package-lock.yml diff --git a/.github/workflows/dbt_run_streamline_chainhead.yml b/.github/workflows/dbt_run_streamline_chainhead.yml index 3c64e1ac..764fb1e8 100644 --- a/.github/workflows/dbt_run_streamline_chainhead.yml +++ b/.github/workflows/dbt_run_streamline_chainhead.yml @@ -43,4 +43,8 @@ jobs: dbt deps - name: Run DBT Jobs run: | - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "optimism_models,tag:streamline_core_complete" "optimism_models,tag:streamline_core_realtime" \ No newline at end of file + dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "optimism_models,tag:streamline_core_complete" "optimism_models,tag:streamline_core_realtime" "optimism_models,tag:streamline_core_complete_receipts" "optimism_models,tag:streamline_core_realtime_receipts" "optimism_models,tag:streamline_core_complete_confirm_blocks" "optimism_models,tag:streamline_core_realtime_confirm_blocks" + + - name: Run Chainhead Tests + run: | + dbt test -m "optimism_models,tag:chainhead" \ No newline at end of file diff --git a/.github/workflows/dbt_run_streamline_history_adhoc.yml b/.github/workflows/dbt_run_streamline_history_adhoc.yml index bfe1c598..69faaae6 100644 --- a/.github/workflows/dbt_run_streamline_history_adhoc.yml +++ b/.github/workflows/dbt_run_streamline_history_adhoc.yml @@ -29,7 +29,7 @@ on: description: 'DBT Run Command' required: true options: - - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "optimism_models,tag:streamline_core_complete" "optimism_models,tag:streamline_core_history" + - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "optimism_models,tag:streamline_core_complete" "optimism_models,tag:streamline_core_history" "optimism_models,tag:streamline_core_complete_receipts" "optimism_models,tag:streamline_core_history_receipts" "optimism_models,tag:streamline_core_complete_confirm_blocks" "optimism_models,tag:streamline_core_history_confirm_blocks" - dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "optimism_models,tag:streamline_decoded_logs_complete" "optimism_models,tag:streamline_decoded_logs_history" env: diff --git a/.gitignore b/.gitignore index 28c99ed5..1ebc2457 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,6 @@ logs/ .vscode/ dbt-env/ .env -.user.yml \ No newline at end of file +.user.yml + +package-lock.yml \ No newline at end of file diff --git a/dbt_project.yml b/dbt_project.yml index 3fd16b1a..8f7d86c1 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -71,7 +71,85 @@ vars: HEAL_MODEL: False HEAL_MODELS: [] START_GHA_TASKS: False - API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}' + +#### STREAMLINE 2.0 BEGIN #### + + API_INTEGRATION: '{{ var("config")[target.name]["API_INTEGRATION"] if var("config")[target.name] else var("config")["dev"]["API_INTEGRATION"] }}' EXTERNAL_FUNCTION_URI: '{{ var("config")[target.name]["EXTERNAL_FUNCTION_URI"] if var("config")[target.name] else var("config")["dev"]["EXTERNAL_FUNCTION_URI"] }}' ROLES: | - 
["INTERNAL_DEV"] \ No newline at end of file + ["INTERNAL_DEV"] + config: + # The keys correspond to dbt profiles and are case sensitive + dev: + API_INTEGRATION: AWS_OPTIMISM_API_STG_V2 + EXTERNAL_FUNCTION_URI: 0h08ox8fa4.execute-api.us-east-1.amazonaws.com/stg/ + ROLES: + - AWS_LAMBDA_OPTIMISM_API + - INTERNAL_DEV + + prod: + API_INTEGRATION: AWS_OPTIMISM_API_PROD_V2 + EXTERNAL_FUNCTION_URI: m8uegnks37.execute-api.us-east-1.amazonaws.com/prod/ + ROLES: + - AWS_LAMBDA_OPTIMISM_API + - INTERNAL_DEV + - DBT_CLOUD_OPTIMISM + +#### STREAMLINE 2.0 END #### + +#### FSC_EVM BEGIN #### +# Visit https://github.com/FlipsideCrypto/fsc-evm/wiki for more information on required and optional variables + + ### GLOBAL VARIABLES BEGIN ### + ## REQUIRED + GLOBAL_PROD_DB_NAME: 'optimism' + GLOBAL_NODE_SECRET_PATH: 'Vault/prod/optimism/quicknode/mainnet' + GLOBAL_NODE_URL: '{service}/{Authentication}' + GLOBAL_BLOCKS_PER_HOUR: 1800 + GLOBAL_USES_STREAMLINE_V1: True + GLOBAL_USES_SINGLE_FLIGHT_METHOD: True + + ### GLOBAL VARIABLES END ### + + ### MAIN_PACKAGE VARIABLES BEGIN ### + + ### CORE ### + ## REQUIRED + + ## OPTIONAL + # GOLD_FULL_REFRESH: True + # SILVER_FULL_REFRESH: True + # BRONZE_FULL_REFRESH: True + + # BLOCKS_COMPLETE_FULL_REFRESH: True + # CONFIRM_BLOCKS_COMPLETE_FULL_REFRESH: True + # TRACES_COMPLETE_FULL_REFRESH: True + # RECEIPTS_COMPLETE_FULL_REFRESH: True + # TRANSACTIONS_COMPLETE_FULL_REFRESH: True + + # BLOCKS_TRANSACTIONS_REALTIME_TESTING_LIMIT: 3 + # BLOCKS_TRANSACTIONS_HISTORY_TESTING_LIMIT: 3 + # TRACES_REALTIME_TESTING_LIMIT: 3 + # TRACES_HISTORY_TESTING_LIMIT: 3 + # ARBTRACE_BLOCK_HISTORY_TESTING_LIMIT: 3 + # RECEIPTS_REALTIME_TESTING_LIMIT: 3 + # RECEIPTS_HISTORY_TESTING_LIMIT: 3 + # CONFIRM_BLOCKS_REALTIME_TESTING_LIMIT: 3 + # CONFIRM_BLOCKS_HISTORY_TESTING_LIMIT: 3 + + # ### MAIN_PACKAGE VARIABLES END ### + + # ### DECODER_PACKAGE VARIABLES BEGIN ### + + # ## REQUIRED + + # ## OPTIONAL + + # DECODED_LOGS_COMPLETE_FULL_REFRESH: True + + # DECODED_LOGS_REALTIME_TESTING_LIMIT: 3 + # DECODED_LOGS_HISTORY_SQL_LIMIT: 1 #limit per monthly range + + ### DECODER_PACKAGE VARIABLES END ### + +#### FSC_EVM END #### \ No newline at end of file diff --git a/macros/decoder/decoded_logs_history.sql b/macros/decoder/decoded_logs_history.sql index 6246edc2..4fe2282d 100644 --- a/macros/decoder/decoded_logs_history.sql +++ b/macros/decoder/decoded_logs_history.sql @@ -1,29 +1,26 @@ {% macro decoded_logs_history(backfill_mode=false) %} {%- set params = { - "sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 7500000), + "sql_limit": var("DECODED_LOGS_HISTORY_SQL_LIMIT", 8000000), "producer_batch_size": var("DECODED_LOGS_HISTORY_PRODUCER_BATCH_SIZE", 400000), - "worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000), - "producer_limit_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 1000000) + "worker_batch_size": var("DECODED_LOGS_HISTORY_WORKER_BATCH_SIZE", 100000) } -%} {% set wait_time = var("DECODED_LOGS_HISTORY_WAIT_TIME", 60) %} - {% set find_months_query %} SELECT DISTINCT date_trunc('month', block_timestamp)::date as month FROM {{ ref('core__fact_blocks') }} ORDER BY month ASC {% endset %} - {% set results = run_query(find_months_query) %} {% if execute %} {% set months = results.columns[0].values() %} - + {% for month in months %} {% set view_name = 'decoded_logs_history_' ~ month.strftime('%Y_%m') %} - + {% set create_view_query %} create or replace view streamline.{{view_name}} as ( WITH target_blocks AS ( @@ -46,7 +43,7 @@ ), existing_logs_to_exclude AS ( SELECT 
_log_id - FROM {{ ref('streamline__complete_decode_logs') }} l + FROM {{ ref('streamline__decoded_logs_complete') }} l INNER JOIN target_blocks b using (block_number) ), candidate_logs AS ( @@ -84,11 +81,9 @@ LIMIT {{ params.sql_limit }} ) {% endset %} - {# Create the view #} {% do run_query(create_view_query) %} {{ log("Created view for month " ~ month.strftime('%Y-%m'), info=True) }} - {% if var("STREAMLINE_INVOKE_STREAMS", false) %} {# Check if rows exist first #} {% set check_rows_query %} @@ -99,19 +94,22 @@ {% set has_rows = results.columns[0].values()[0] %} {% if has_rows %} - {# Invoke streamline since rows exist to decode #} + {# Invoke streamline, if rows exist to decode #} {% set decode_query %} - SELECT streamline.udf_bulk_decode_logs( - object_construct( - 'sql_source', '{{view_name}}', - 'producer_batch_size', {{ params.producer_batch_size }}, - 'producer_limit_size', {{ params.producer_limit_size }}) + SELECT + streamline.udf_bulk_decode_logs_v2( + PARSE_JSON( + $${ "external_table": "decoded_logs", + "producer_batch_size": {{ params.producer_batch_size }}, + "sql_limit": {{ params.sql_limit }}, + "sql_source": "{{view_name}}", + "worker_batch_size": {{ params.worker_batch_size }} }$$ + ) ); {% endset %} {% do run_query(decode_query) %} {{ log("Triggered decoding for month " ~ month.strftime('%Y-%m'), info=True) }} - {# Call wait since we actually did some decoding #} {% do run_query("call system$wait(" ~ wait_time ~ ")") %} {{ log("Completed wait after decoding for month " ~ month.strftime('%Y-%m'), info=True) }} @@ -119,7 +117,7 @@ {{ log("No rows to decode for month " ~ month.strftime('%Y-%m'), info=True) }} {% endif %} {% endif %} - + {% endfor %} {% endif %} diff --git a/macros/decoder/run_decoded_logs_history.sql b/macros/decoder/run_decoded_logs_history.sql index 185ac826..3954afb0 100644 --- a/macros/decoder/run_decoded_logs_history.sql +++ b/macros/decoder/run_decoded_logs_history.sql @@ -1,14 +1,14 @@ {% macro run_decoded_logs_history() %} +{% set blockchain = var('GLOBAL_PROD_DB_NAME','').lower() %} + {% set check_for_new_user_abis_query %} select 1 from {{ ref('silver__user_verified_abis') }} where _inserted_timestamp::date = sysdate()::date and dayname(sysdate()) <> 'Sat' {% endset %} - {% set results = run_query(check_for_new_user_abis_query) %} - {% if execute %} {% set new_user_abis = results.columns[0].values()[0] %} @@ -17,7 +17,7 @@ SELECT github_actions.workflow_dispatches( 'FlipsideCrypto', - 'optimism-models', + '{{ blockchain }}' || '-models', 'dbt_run_streamline_decoded_logs_history.yml', NULL ) diff --git a/macros/fsc_evm_temp/decoder_package/streamline_external_table_queries_decoder.sql b/macros/fsc_evm_temp/decoder_package/streamline_external_table_queries_decoder.sql new file mode 100644 index 00000000..318a3daa --- /dev/null +++ b/macros/fsc_evm_temp/decoder_package/streamline_external_table_queries_decoder.sql @@ -0,0 +1,101 @@ +{% macro streamline_external_table_query_decoder( + source_name, + source_version + ) %} + + {% if source_version != '' %} + {% set source_version = '_' ~ source_version.lower() %} + {% endif %} + + WITH meta AS ( + SELECT + job_created_time AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number, + TO_DATE( + concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5)) + ) AS _partition_by_created_date + FROM + TABLE( + information_schema.external_table_file_registration_history( + start_time => 
DATEADD('day', -3, CURRENT_TIMESTAMP()), + table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}') + ) A + ) + SELECT + block_number, + id :: STRING AS id, + DATA, + metadata, + b.file_name, + _inserted_timestamp, + s._partition_by_block_number AS _partition_by_block_number, + s._partition_by_created_date AS _partition_by_created_date + FROM + {{ source( + "bronze_streamline", + source_name ~ source_version + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b._partition_by_block_number = s._partition_by_block_number + AND b._partition_by_created_date = s._partition_by_created_date + WHERE + b._partition_by_block_number = s._partition_by_block_number + AND b._partition_by_created_date = s._partition_by_created_date + AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP()) + AND DATA :error IS NULL + AND DATA IS NOT NULL +{% endmacro %} + + +{% macro streamline_external_table_query_decoder_fr( + source_name, + source_version + ) %} + + {% if source_version != '' %} + {% set source_version = '_' ~ source_version.lower() %} + {% endif %} + + WITH meta AS ( + SELECT + registered_on AS _inserted_timestamp, + file_name, + CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number, + TO_DATE( + concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5)) + ) AS _partition_by_created_date + FROM + TABLE( + information_schema.external_table_files( + table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}' + ) + ) A + ) +SELECT + block_number, + id :: STRING AS id, + DATA, + metadata, + b.file_name, + _inserted_timestamp, + s._partition_by_block_number AS _partition_by_block_number, + s._partition_by_created_date AS _partition_by_created_date +FROM + {{ source( + "bronze_streamline", + source_name ~ source_version + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b._partition_by_block_number = s._partition_by_block_number + AND b._partition_by_created_date = s._partition_by_created_date +WHERE + b._partition_by_block_number = s._partition_by_block_number + AND b._partition_by_created_date = s._partition_by_created_date + AND DATA :error IS NULL + AND DATA IS NOT NULL +{% endmacro %} diff --git a/macros/fsc_evm_temp/main_package/bronze/streamline_external_table_queries.sql b/macros/fsc_evm_temp/main_package/bronze/streamline_external_table_queries.sql new file mode 100644 index 00000000..c6f7919d --- /dev/null +++ b/macros/fsc_evm_temp/main_package/bronze/streamline_external_table_queries.sql @@ -0,0 +1,141 @@ +{% macro streamline_external_table_query( + source_name, + source_version, + partition_function, + balances, + block_number, + uses_receipts_by_hash + ) %} + + {% if source_version != '' %} + {% set source_version = '_' ~ source_version.lower() %} + {% endif %} + + WITH meta AS ( + SELECT + job_created_time AS _inserted_timestamp, + file_name, + {{ partition_function }} AS partition_key + FROM + TABLE( + information_schema.external_table_file_registration_history( + start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), + table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}') + ) A + ) + SELECT + s.*, + b.file_name, + b._inserted_timestamp + + {% if balances %}, + r.block_timestamp :: TIMESTAMP AS block_timestamp + {% endif %} + + {% if block_number %}, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request 
:"data" + ) :id :: STRING + ) :: INT AS block_number + {% endif %} + {% if uses_receipts_by_hash %}, + s.value :"TX_HASH" :: STRING AS tx_hash + {% endif %} + FROM + {{ source( + "bronze_streamline", + source_name ~ source_version + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.partition_key + + {% if balances %} + JOIN {{ ref('_block_ranges') }} + r + ON r.block_number = COALESCE( + s.value :"BLOCK_NUMBER" :: INT, + s.value :"block_number" :: INT + ) + {% endif %} + WHERE + b.partition_key = s.partition_key + AND DATA :error IS NULL + AND DATA IS NOT NULL +{% endmacro %} + +{% macro streamline_external_table_query_fr( + source_name, + source_version, + partition_function, + partition_join_key, + balances, + block_number, + uses_receipts_by_hash + ) %} + + {% if source_version != '' %} + {% set source_version = '_' ~ source_version.lower() %} + {% endif %} + + WITH meta AS ( + SELECT + registered_on AS _inserted_timestamp, + file_name, + {{ partition_function }} AS partition_key + FROM + TABLE( + information_schema.external_table_files( + table_name => '{{ source( "bronze_streamline", source_name ~ source_version) }}' + ) + ) A + ) +SELECT + s.*, + b.file_name, + b._inserted_timestamp + + {% if balances %}, + r.block_timestamp :: TIMESTAMP AS block_timestamp +{% endif %} + +{% if block_number %}, + COALESCE( + s.value :"BLOCK_NUMBER" :: STRING, + s.value :"block_number" :: STRING, + s.metadata :request :"data" :id :: STRING, + PARSE_JSON( + s.metadata :request :"data" + ) :id :: STRING + ) :: INT AS block_number +{% endif %} +{% if uses_receipts_by_hash %}, + s.value :"TX_HASH" :: STRING AS tx_hash +{% endif %} +FROM + {{ source( + "bronze_streamline", + source_name ~ source_version + ) }} + s + JOIN meta b + ON b.file_name = metadata$filename + AND b.partition_key = s.{{ partition_join_key }} + + {% if balances %} + JOIN {{ ref('_block_ranges') }} + r + ON r.block_number = COALESCE( + s.value :"BLOCK_NUMBER" :: INT, + s.value :"block_number" :: INT + ) + {% endif %} +WHERE + b.partition_key = s.{{ partition_join_key }} + AND DATA :error IS NULL + AND DATA IS NOT NULL +{% endmacro %} diff --git a/macros/fsc_evm_temp/main_package/logging/bronze.sql b/macros/fsc_evm_temp/main_package/logging/bronze.sql new file mode 100644 index 00000000..b367deb5 --- /dev/null +++ b/macros/fsc_evm_temp/main_package/logging/bronze.sql @@ -0,0 +1,36 @@ +{% macro log_bronze_details(source_name, source_version, model_type, partition_function, partition_join_key, block_number, uses_receipts_by_hash) %} + +{% if source_version != '' %} + {% set source_version = '_' ~ source_version.lower() %} +{% endif %} +{% if model_type != '' %} + {% set model_type = '_' ~ model_type %} +{% endif %} + +{%- if flags.WHICH == 'compile' and execute -%} + + {{ log("=== Current Variable Settings ===", info=True) }} + {{ log(source_name ~ model_type ~ '_PARTITION_FUNCTION: ' ~ partition_function, info=True) }} + {{ log(source_name ~ model_type ~ '_PARTITION_JOIN_KEY: ' ~ partition_join_key, info=True) }} + {{ log(source_name ~ model_type ~ '_BLOCK_NUMBER: ' ~ block_number, info=True) }} + {% if uses_receipts_by_hash %} + {{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }} + {% endif %} + + {{ log("", info=True) }} + {{ log("=== Source Details ===", info=True) }} + {{ log("Source: " ~ source('bronze_streamline', source_name.lower() ~ source_version.lower()), info=True) }} + {{ log("", info=True) }} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT 
Model Config ===\n'%} + {% set config_log = config_log ~ '\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} + +{%- endif -%} + +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/logging/complete.sql b/macros/fsc_evm_temp/main_package/logging/complete.sql new file mode 100644 index 00000000..3637b41e --- /dev/null +++ b/macros/fsc_evm_temp/main_package/logging/complete.sql @@ -0,0 +1,29 @@ +{% macro log_complete_details(post_hook, full_refresh_type, uses_receipts_by_hash) %} + +{%- if flags.WHICH == 'compile' and execute -%} + + {% if uses_receipts_by_hash %} + + {{ log("=== Current Variable Settings ===", info=True) }} + {{ log("USES_RECEIPTS_BY_HASH: " ~ uses_receipts_by_hash, info=True) }} + + {% endif %} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%} + {% set config_log = config_log ~ '\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' unique_key = "' ~ config.get('unique_key') ~ '",\n' %} + {% set config_log = config_log ~ ' cluster_by = "' ~ config.get('cluster_by') ~ '",\n' %} + {% set config_log = config_log ~ ' merge_update_columns = ' ~ config.get('merge_update_columns') | tojson ~ ',\n' %} + {% set config_log = config_log ~ ' post_hook = "' ~ post_hook ~ '",\n' %} + {% set config_log = config_log ~ ' incremental_predicates = ' ~ config.get('incremental_predicates') | tojson ~ ',\n' %} + {% set config_log = config_log ~ ' full_refresh = ' ~ full_refresh_type ~ ',\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} + +{%- endif -%} + +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/logging/logging.sql b/macros/fsc_evm_temp/main_package/logging/logging.sql new file mode 100644 index 00000000..f3686852 --- /dev/null +++ b/macros/fsc_evm_temp/main_package/logging/logging.sql @@ -0,0 +1,36 @@ +{% macro log_model_details(vars=false, params=false) %} + +{%- if execute -%} +/* +DBT Model Config: +{{ model.config | tojson(indent=2) }} +*/ + +{% if vars is not false %} + +{% if var('LOG_MODEL_DETAILS', false) %} +{{ log( vars | tojson(indent=2), info=True) }} +{% endif %} +/* +Variables: +{{ vars | tojson(indent=2) }} +*/ +{% endif %} + +{% if params is not false %} + +{% if var('LOG_MODEL_DETAILS', false) %} +{{ log( params | tojson(indent=2), info=True) }} +{% endif %} +/* +Parameters: +{{ params | tojson(indent=2) }} +*/ +{% endif %} + +/* +Raw Code: +{{ model.raw_code }} +*/ +{%- endif -%} +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/logging/requests.sql b/macros/fsc_evm_temp/main_package/logging/requests.sql new file mode 100644 index 00000000..85cbbda1 --- /dev/null +++ b/macros/fsc_evm_temp/main_package/logging/requests.sql @@ -0,0 +1,55 @@ +{% macro log_streamline_details(model_name, model_type, node_url, model_quantum_state, sql_limit, testing_limit, order_by_clause, new_build, streamline_params, uses_receipts_by_hash, method, method_params, min_block=0) %} + +{%- if flags.WHICH == 'compile' and execute 
-%} + + {{ log("=== Current Variable Settings ===", info=True) }} + {{ log("START_UP_BLOCK: " ~ min_block, info=True) }} + {{ log("", info=True) }} + + {{ log("=== API Details ===", info=True) }} + + {{ log("NODE_URL: " ~ node_url, info=True) }} + {{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }} + {{ log("", info=True) }} + + {{ log("=== Current Variable Settings ===", info=True) }} + + {{ log((model_name ~ '_' ~ model_type ~ '_model_quantum_state').upper() ~ ': ' ~ model_quantum_state, info=True) }} + {{ log((model_name ~ '_' ~ model_type ~ '_sql_limit').upper() ~ ': ' ~ sql_limit, info=True) }} + {{ log((model_name ~ '_' ~ model_type ~ '_testing_limit').upper() ~ ': ' ~ testing_limit, info=True) }} + {{ log((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper() ~ ': ' ~ order_by_clause, info=True) }} + {{ log((model_name ~ '_' ~ model_type ~ '_new_build').upper() ~ ': ' ~ new_build, info=True) }} + {{ log('USES_RECEIPTS_BY_HASH' ~ ': ' ~ uses_receipts_by_hash, info=True) }} + {{ log("", info=True) }} + + {{ log("=== RPC Details ===", info=True) }} + + {{ log(model_name ~ ": {", info=True) }} + {{ log(" method: '" ~ method ~ "',", info=True) }} + {{ log(" method_params: " ~ method_params, info=True) }} + {{ log("}", info=True) }} + {{ log("", info=True) }} + + {% set params_str = streamline_params | tojson %} + {% set params_formatted = params_str | replace('{', '{\n ') | replace('}', '\n }') | replace(', ', ',\n ') %} + + {# Clean up the method_params formatting #} + {% set params_formatted = params_formatted | replace('"method_params": "', '"method_params": "') | replace('\\n', ' ') | replace('\\u0027', "'") %} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%} + {% set config_log = config_log ~ '\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' post_hook = fsc_utils.if_data_call_function_v2(\n' %} + {% set config_log = config_log ~ ' func = "streamline.udf_bulk_rest_api_v2",\n' %} + {% set config_log = config_log ~ ' target = "' ~ this.schema ~ '.' 
~ this.identifier ~ '",\n' %} + {% set config_log = config_log ~ ' params = ' ~ params_formatted ~ '\n' %} + {% set config_log = config_log ~ ' ),\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} + +{%- endif -%} + +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/streamline/set_default_variables.sql b/macros/fsc_evm_temp/main_package/streamline/set_default_variables.sql new file mode 100644 index 00000000..598c3b95 --- /dev/null +++ b/macros/fsc_evm_temp/main_package/streamline/set_default_variables.sql @@ -0,0 +1,47 @@ +{% macro set_default_variables_streamline(model_name, model_type) %} + +{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%} +{%- set node_secret_path = var('GLOBAL_NODE_SECRET_PATH', '') -%} +{%- set model_quantum_state = var((model_name ~ '_' ~ model_type ~ '_quantum_state').upper(), 'streamline') -%} +{%- set testing_limit = var((model_name ~ '_' ~ model_type ~ '_testing_limit').upper(), none) -%} +{%- set new_build = var((model_name ~ '_' ~ model_type ~ '_new_build').upper(), false) -%} +{%- set default_order = 'ORDER BY partition_key DESC, block_number DESC' if model_type.lower() == 'realtime' + else 'ORDER BY partition_key ASC, block_number ASC' -%} +{%- set order_by_clause = var((model_name ~ '_' ~ model_type ~ '_order_by_clause').upper(), default_order) -%} +{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%} + +{%- set variables = { + 'node_url': node_url, + 'node_secret_path': node_secret_path, + 'model_quantum_state': model_quantum_state, + 'testing_limit': testing_limit, + 'new_build': new_build, + 'order_by_clause': order_by_clause, + 'uses_receipts_by_hash': uses_receipts_by_hash +} -%} + +{{ return(variables) }} + +{% endmacro %} + +{% macro set_default_variables_bronze(source_name, model_type) %} + +{%- set partition_function = var(source_name ~ model_type ~ '_PARTITION_FUNCTION', + "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)") +-%} +{%- set partition_join_key = var(source_name ~ model_type ~ '_PARTITION_JOIN_KEY', 'partition_key') -%} +{%- set block_number = var(source_name ~ model_type ~ '_BLOCK_NUMBER', true) -%} +{%- set balances = var(source_name ~ model_type ~ '_BALANCES', false) -%} +{%- set uses_receipts_by_hash = var('GLOBAL_USES_RECEIPTS_BY_HASH', false) -%} + +{%- set variables = { + 'partition_function': partition_function, + 'partition_join_key': partition_join_key, + 'block_number': block_number, + 'balances': balances, + 'uses_receipts_by_hash': uses_receipts_by_hash +} -%} + +{{ return(variables) }} + +{% endmacro %} \ No newline at end of file diff --git a/macros/fsc_evm_temp/main_package/streamline/set_streamline_parameters.sql b/macros/fsc_evm_temp/main_package/streamline/set_streamline_parameters.sql new file mode 100644 index 00000000..9c7262f4 --- /dev/null +++ b/macros/fsc_evm_temp/main_package/streamline/set_streamline_parameters.sql @@ -0,0 +1,57 @@ +{% macro set_streamline_parameters(model_name, model_type, multiplier=1) %} + +{%- set rpc_config_details = { + "blocks_transactions": { + "method": 'eth_getBlockByNumber', + "method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), TRUE)', + "exploded_key": ['result', 'result.transactions'] + }, + "receipts_by_hash": { + "method": 'eth_getTransactionReceipt', + "method_params": 'ARRAY_CONSTRUCT(tx_hash)' + }, + 
"receipts": { + "method": 'eth_getBlockReceipts', + "method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number))', + "exploded_key": ['result'], + "lambdas": 2 + + }, + "traces": { + "method": 'debug_traceBlockByNumber', + "method_params": "ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), OBJECT_CONSTRUCT('tracer', 'callTracer', 'timeout', '120s'))", + "exploded_key": ['result'], + "lambdas": 2 + }, + "confirm_blocks": { + "method": 'eth_getBlockByNumber', + "method_params": 'ARRAY_CONSTRUCT(utils.udf_int_to_hex(block_number), FALSE)' + } +} -%} + +{%- set rpc_config = rpc_config_details[model_name.lower()] -%} + +{%- set params = { + "external_table": var((model_name ~ '_' ~ model_type ~ '_external_table').upper(), model_name.lower()), + "sql_limit": var((model_name ~ '_' ~ model_type ~ '_sql_limit').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier), + "producer_batch_size": var((model_name ~ '_' ~ model_type ~ '_producer_batch_size').upper(), 2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier), + "worker_batch_size": var( + (model_name ~ '_' ~ model_type ~ '_worker_batch_size').upper(), + (2 * var('GLOBAL_BLOCKS_PER_HOUR',0) * multiplier) // (rpc_config.get('lambdas', 1)) + ), + "sql_source": (model_name ~ '_' ~ model_type).lower(), + "method": rpc_config['method'], + "method_params": rpc_config['method_params'] +} -%} + +{%- if rpc_config.get('exploded_key') is not none -%} + {%- do params.update({"exploded_key": tojson(rpc_config['exploded_key'])}) -%} +{%- endif -%} + +{%- if rpc_config.get('lambdas') is not none -%} + {%- do params.update({"lambdas": rpc_config['lambdas']}) -%} +{%- endif -%} + +{{ return(params) }} + +{% endmacro %} \ No newline at end of file diff --git a/macros/streamline/models.sql b/macros/streamline/models.sql deleted file mode 100644 index 1f01fdbc..00000000 --- a/macros/streamline/models.sql +++ /dev/null @@ -1,119 +0,0 @@ -{% macro streamline_external_table_query( - model, - partition_function, - partition_name, - unique_key - ) %} - WITH meta AS ( - SELECT - last_modified AS _inserted_timestamp, - file_name, - {{ partition_function }} AS {{ partition_name }} - FROM - TABLE( - information_schema.external_table_file_registration_history( - start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), - table_name => '{{ source( "bronze_streamline", model) }}') - ) A - ) - SELECT - {{ unique_key }}, - DATA, - _inserted_timestamp, - MD5( - CAST( - COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text - ) - ) AS id, - s.{{ partition_name }}, - s.value AS VALUE, - file_name - FROM - {{ source( - "bronze_streamline", - model - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b.{{ partition_name }} = s.{{ partition_name }} - WHERE - b.{{ partition_name }} = s.{{ partition_name }} - AND ( - DATA :error :code IS NULL - OR DATA :error :code NOT IN ( - '-32000', - '-32001', - '-32002', - '-32003', - '-32004', - '-32005', - '-32006', - '-32007', - '-32008', - '-32009', - '-32010', - '-32608' - ) - ) -{% endmacro %} - -{% macro streamline_external_table_fr_query( - model, - partition_function, - partition_name, - unique_key - ) %} - WITH meta AS ( - SELECT - registered_on AS _inserted_timestamp, - file_name, - {{ partition_function }} AS {{ partition_name }} - FROM - TABLE( - information_schema.external_table_files( - table_name => '{{ source( "bronze_streamline", model) }}' - ) - ) A - ) -SELECT - {{ unique_key }}, - DATA, - _inserted_timestamp, - MD5( - CAST( - COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text - ) - ) 
AS id, - s.{{ partition_name }}, - s.value AS VALUE, - file_name -FROM - {{ source( - "bronze_streamline", - model - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b.{{ partition_name }} = s.{{ partition_name }} -WHERE - b.{{ partition_name }} = s.{{ partition_name }} - AND ( - DATA :error :code IS NULL - OR DATA :error :code NOT IN ( - '-32000', - '-32001', - '-32002', - '-32003', - '-32004', - '-32005', - '-32006', - '-32007', - '-32008', - '-32009', - '-32010', - '-32608' - ) - ) -{% endmacro %} diff --git a/models/bronze/overflow/bronze__potential_overflowed_traces.sql b/models/bronze/overflow/bronze__potential_overflowed_traces.sql index 95bde872..5cec52e2 100644 --- a/models/bronze/overflow/bronze__potential_overflowed_traces.sql +++ b/models/bronze/overflow/bronze__potential_overflowed_traces.sql @@ -46,7 +46,7 @@ missing_txs AS ( block_number, tx_position ) - JOIN {{ ref("streamline__complete_debug_traceBlockByNumber") }} USING (block_number) + JOIN {{ ref("streamline__traces_complete") }} USING (block_number) LEFT JOIN {{ source( 'optimism_silver', 'overflowed_traces' diff --git a/models/silver/core/silver__blocks.sql b/models/silver/core/silver__blocks.sql index 1af0aee8..f412a3b0 100644 --- a/models/silver/core/silver__blocks.sql +++ b/models/silver/core/silver__blocks.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_blocks') }} +-- depends_on: {{ ref('bronze__blocks') }} {{ config( materialized = 'incremental', unique_key = "block_number", @@ -61,7 +61,7 @@ SELECT FROM {% if is_incremental() %} -{{ ref('bronze__streamline_blocks') }} +{{ ref('bronze__blocks') }} WHERE _inserted_timestamp >= ( SELECT @@ -70,7 +70,7 @@ WHERE {{ this }} ) {% else %} - {{ ref('bronze__streamline_fr_blocks') }} + {{ ref('bronze__blocks_fr') }} {% endif %} qualify(ROW_NUMBER() over (PARTITION BY block_number diff --git a/models/silver/core/silver__confirmed_blocks.sql b/models/silver/core/silver__confirmed_blocks.sql index d060ff38..727c5ca8 100644 --- a/models/silver/core/silver__confirmed_blocks.sql +++ b/models/silver/core/silver__confirmed_blocks.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_confirm_blocks') }} +-- depends_on: {{ ref('bronze__confirm_blocks') }} {{ config( materialized = 'incremental', incremental_strategy = 'delete+insert', @@ -18,7 +18,7 @@ WITH base AS ( FROM {% if is_incremental() %} -{{ ref('bronze__streamline_confirm_blocks') }} +{{ ref('bronze__confirm_blocks') }} WHERE _inserted_timestamp >= ( SELECT @@ -32,7 +32,7 @@ WHERE {{ this }} ) {% else %} - {{ ref('bronze__streamline_fr_confirm_blocks') }} + {{ ref('bronze__confirm_blocks_fr') }} {% endif %} qualify(ROW_NUMBER() over (PARTITION BY block_number diff --git a/models/silver/core/silver__decoded_logs.sql b/models/silver/core/silver__decoded_logs.sql index b341f7fb..5b6c1462 100644 --- a/models/silver/core/silver__decoded_logs.sql +++ b/models/silver/core/silver__decoded_logs.sql @@ -42,7 +42,7 @@ WHERE ) AND DATA NOT ILIKE '%Event topic is not present in given ABI%' {% else %} - {{ ref('bronze__fr_decoded_logs') }} + {{ ref('bronze__decoded_logs_fr') }} WHERE _partition_by_block_number <= 105235063 --bedrock AND DATA NOT ILIKE '%Event topic is not present in given ABI%' diff --git a/models/silver/core/silver__receipts.sql b/models/silver/core/silver__receipts.sql index bca802f8..80f73db0 100644 --- a/models/silver/core/silver__receipts.sql +++ b/models/silver/core/silver__receipts.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_receipts') }} +-- 
depends_on: {{ ref('bronze__receipts') }} {{ config( materialized = 'incremental', incremental_strategy = 'delete+insert', @@ -18,7 +18,7 @@ WITH base AS ( FROM {% if is_incremental() %} -{{ ref('bronze__streamline_receipts') }} +{{ ref('bronze__receipts') }} WHERE _inserted_timestamp >= ( SELECT @@ -28,7 +28,7 @@ WHERE ) AND IS_OBJECT(DATA) {% else %} - {{ ref('bronze__streamline_fr_receipts') }} + {{ ref('bronze__receipts_fr') }} WHERE IS_OBJECT(DATA) {% endif %} diff --git a/models/silver/core/silver__traces.sql b/models/silver/core/silver__traces.sql index 73e645ee..f4d7c96b 100644 --- a/models/silver/core/silver__traces.sql +++ b/models/silver/core/silver__traces.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_traces') }} +-- depends_on: {{ ref('bronze__traces') }} {{ config ( materialized = "incremental", incremental_strategy = 'delete+insert', @@ -8,11 +8,7 @@ tags = ['core','non_realtime'], full_refresh = false ) }} -{# {{ fsc_evm.silver_traces_v1( -full_reload_start_block = 30000000, -full_reload_blocks = 10000000 -) }} -#} + WITH bronze_traces AS ( SELECT @@ -24,7 +20,7 @@ WITH bronze_traces AS ( FROM {% if is_incremental() and not full_reload_mode %} -{{ ref('bronze__streamline_traces') }} +{{ ref('bronze__traces') }} WHERE _inserted_timestamp >= ( SELECT @@ -33,7 +29,7 @@ WHERE {{ this }} ) AND DATA :result IS NOT NULL {% elif is_incremental() and full_reload_mode %} - {{ ref('bronze__streamline_fr_traces') }} + {{ ref('bronze__traces_fr') }} WHERE _partition_by_block_id BETWEEN ( SELECT @@ -48,7 +44,7 @@ WHERE {{ this }} ) {% else %} - {{ ref('bronze__streamline_fr_traces') }} + {{ ref('bronze__traces_fr') }} WHERE _partition_by_block_id <= 30000000 {% endif %} diff --git a/models/silver/core/silver__transactions.sql b/models/silver/core/silver__transactions.sql index a8972e20..0b670aa7 100644 --- a/models/silver/core/silver__transactions.sql +++ b/models/silver/core/silver__transactions.sql @@ -1,4 +1,4 @@ --- depends_on: {{ ref('bronze__streamline_transactions') }} +-- depends_on: {{ ref('bronze__transactions') }} {{ config( materialized = 'incremental', incremental_strategy = 'delete+insert', @@ -17,7 +17,7 @@ WITH base AS ( FROM {% if is_incremental() %} -{{ ref('bronze__streamline_transactions') }} +{{ ref('bronze__transactions') }} WHERE _inserted_timestamp >= ( SELECT @@ -27,7 +27,7 @@ WHERE ) AND IS_OBJECT(DATA) {% else %} - {{ ref('bronze__streamline_fr_transactions') }} + {{ ref('bronze__transactions_fr') }} WHERE IS_OBJECT(DATA) {% endif %} diff --git a/models/silver/core/traces2_fix/silver__fact_traces2_fix.sql b/models/silver/core/traces2_fix/silver__fact_traces2_fix.sql deleted file mode 100644 index da6c3966..00000000 --- a/models/silver/core/traces2_fix/silver__fact_traces2_fix.sql +++ /dev/null @@ -1,163 +0,0 @@ -{# {{ config ( -materialized = "incremental", -incremental_strategy = 'delete+insert', -unique_key = ["block_number", "tx_position", "trace_address"], -tags = ['traces_fix'] -) }} - -{% set batch_query %} -SELECT - MAX(next_batch_id) AS next_batch_id -FROM - ( - SELECT - 1 AS next_batch_id - -{% if is_incremental() %} -UNION ALL -SELECT - COALESCE(MAX(batch_id), 0) + 1 AS next_batch_id -FROM - {{ this }} -{% endif %}) {% endset %} -{% if execute %} - {% set result = run_query(batch_query) %} - {{ log( - "Debug - Batch Query result: " ~ result, - info = True - ) }} - - {% set batch_id = result.columns [0] [0] %} - {% if batch_id > 261 %} - {{ exceptions.raise_compiler_error("Processing complete - reached max batch_id of 261") }} - {% 
endif %} - - {% set block_size = 500000 %} - {% set block_start = 1 + ( - batch_id - 1 - ) * block_size %} - {% set block_end = batch_id * block_size %} - {{ log( - "Processing batch_id: " ~ batch_id ~ ", blocks: " ~ block_start ~ " to " ~ block_end, - info = True - ) }} -{% endif %} - -WITH silver_traces AS ( - SELECT - block_number, - tx_position, - trace_address, - parent_trace_address, - trace_json - FROM - {{ ref('silver__traces2') }} - WHERE - block_number BETWEEN {{ block_start }} - AND {{ block_end }} -), -errored_traces AS ( - SELECT - block_number, - tx_position, - trace_address, - trace_json - FROM - silver_traces - WHERE - trace_json :error :: STRING IS NOT NULL -), -error_logic AS ( - SELECT - b0.block_number, - b0.tx_position, - b0.trace_address, - b0.trace_json :error :: STRING AS error, - b1.trace_json :error :: STRING AS any_error, - b2.trace_json :error :: STRING AS origin_error - FROM - silver_traces b0 - LEFT JOIN errored_traces b1 - ON b0.block_number = b1.block_number - AND b0.tx_position = b1.tx_position - AND b0.trace_address RLIKE CONCAT( - '^', - b1.trace_address, - '(_[0-9]+)*$' - ) - LEFT JOIN errored_traces b2 - ON b0.block_number = b2.block_number - AND b0.tx_position = b2.tx_position - AND b2.trace_address = 'ORIGIN' -), -aggregated_errors AS ( - SELECT - block_number, - tx_position, - trace_address, - error, - IFF(MAX(any_error) IS NULL - AND error IS NULL - AND origin_error IS NULL, TRUE, FALSE) AS trace_succeeded - FROM - error_logic - GROUP BY - block_number, - tx_position, - trace_address, - error, - origin_error), - prod AS ( - SELECT - block_number, - tx_position, - tx_hash, - trace_address, - trace_succeeded AS prod_trace_succeeded - FROM - {{ ref('silver__fact_traces2') }} - WHERE - block_number BETWEEN {{ block_start }} - AND {{ block_end }} - ), - final_errors AS ( - SELECT - block_number, - tx_position, - trace_address, - error, - trace_succeeded, - prod_trace_succeeded - FROM - aggregated_errors - INNER JOIN prod USING ( - block_number, - tx_position, - trace_address - ) - WHERE - prod_trace_succeeded != trace_succeeded - UNION ALL - SELECT - NULL AS block_number, - NULL AS tx_position, - NULL AS trace_address, - NULL AS error, - NULL AS trace_succeeded, - NULL AS prod_trace_succeeded - ), - batch AS ( - SELECT - CAST({{ batch_id }} AS NUMBER(10, 0)) AS batch_id - ) - SELECT - batch_id, - block_number, - tx_position, - trace_address, - error, - trace_succeeded, - prod_trace_succeeded - FROM - batch - CROSS JOIN final_errors #} diff --git a/models/sources.yml b/models/sources.yml index 8da37ab3..edc635c7 100644 --- a/models/sources.yml +++ b/models/sources.yml @@ -38,14 +38,20 @@ sources: - name: bronze_streamline database: streamline schema: | - {{ "OPTIMISM_DEV" if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else "OPTIMISM" }} + {{ var('GLOBAL_PROD_DB_NAME') ~ ('_dev' if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else '') }} tables: - name: receipts - name: blocks - name: transactions - - name: debug_traceBlockByNumber + - name: debug_traceblockbynumber - name: decoded_logs - name: confirm_blocks + - name: blocks_v2 + - name: transactions_v2 + - name: receipts_v2 + - name: traces_v2 + - name: confirm_blocks_v2 + - name: decoded_logs_v2 - name: optimism_silver database: optimism schema: silver diff --git a/models/streamline/bronze/core/bronze__blocks.sql b/models/streamline/bronze/core/bronze__blocks.sql new file mode 100644 index 00000000..aa987cf1 --- /dev/null +++ b/models/streamline/bronze/core/bronze__blocks.sql @@ -0,0 
+1,39 @@ +{# Set variables #} +{% set source_name = 'BLOCKS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) or var('GLOBAL_USES_BLOCKS_TRANSACTIONS_PATH', false) else '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__blocks_fr.sql b/models/streamline/bronze/core/bronze__blocks_fr.sql new file mode 100644 index 00000000..33ad98f4 --- /dev/null +++ b/models/streamline/bronze/core/bronze__blocks_fr.sql @@ -0,0 +1,41 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +SELECT + partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__blocks_fr_v2') }} +{% if var('GLOBAL_USES_STREAMLINE_V1', false) %} +UNION ALL +SELECT + _partition_by_block_id AS partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__blocks_fr_v1') }} +{% endif %} +{% if var('GLOBAL_USES_BLOCKS_TRANSACTIONS_PATH', false) %} +UNION ALL +SELECT + partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__blocks_fr_v2_1') }} +{% endif %} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__blocks_fr_v1.sql b/models/streamline/bronze/core/bronze__blocks_fr_v1.sql new file mode 100644 index 00000000..b45639a6 --- /dev/null +++ b/models/streamline/bronze/core/bronze__blocks_fr_v1.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'BLOCKS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %} +{% set partition_join_key = '_partition_by_block_id' %} +{% set balances = default_vars['balances'] %} +{% set block_number = false %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = 
['bronze_core_streamline_v1'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} diff --git a/models/streamline/bronze/core/bronze__blocks_fr_v2.sql b/models/streamline/bronze/core/bronze__blocks_fr_v2.sql new file mode 100644 index 00000000..91576bd6 --- /dev/null +++ b/models/streamline/bronze/core/bronze__blocks_fr_v2.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'BLOCKS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__confirm_blocks.sql b/models/streamline/bronze/core/bronze__confirm_blocks.sql new file mode 100644 index 00000000..031fdb2c --- /dev/null +++ b/models/streamline/bronze/core/bronze__confirm_blocks.sql @@ -0,0 +1,39 @@ +{# Set variables #} +{% set source_name = 'CONFIRM_BLOCKS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = 
uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__confirm_blocks_fr.sql b/models/streamline/bronze/core/bronze__confirm_blocks_fr.sql new file mode 100644 index 00000000..ebf8ac69 --- /dev/null +++ b/models/streamline/bronze/core/bronze__confirm_blocks_fr.sql @@ -0,0 +1,28 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +SELECT + partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__confirm_blocks_fr_v2') }} +{% if var('GLOBAL_USES_STREAMLINE_V1', false) %} +UNION ALL +SELECT + _partition_by_block_id AS partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__confirm_blocks_fr_v1') }} +{% endif %} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__confirm_blocks_fr_v1.sql b/models/streamline/bronze/core/bronze__confirm_blocks_fr_v1.sql new file mode 100644 index 00000000..a4402e53 --- /dev/null +++ b/models/streamline/bronze/core/bronze__confirm_blocks_fr_v1.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'CONFIRM_BLOCKS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %} +{% set partition_join_key = '_partition_by_block_id' %} +{% set balances = default_vars['balances'] %} +{% set block_number = false %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core_streamline_v1'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} diff --git a/models/streamline/bronze/core/bronze__confirm_blocks_fr_v2.sql b/models/streamline/bronze/core/bronze__confirm_blocks_fr_v2.sql new file mode 100644 index 00000000..d03c45b9 --- /dev/null +++ b/models/streamline/bronze/core/bronze__confirm_blocks_fr_v2.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'CONFIRM_BLOCKS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = 
block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__receipts.sql b/models/streamline/bronze/core/bronze__receipts.sql new file mode 100644 index 00000000..21d84b90 --- /dev/null +++ b/models/streamline/bronze/core/bronze__receipts.sql @@ -0,0 +1,39 @@ +{# Set variables #} +{% set source_name = 'RECEIPTS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_receipts'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__receipts_fr.sql b/models/streamline/bronze/core/bronze__receipts_fr.sql new file mode 100644 index 00000000..3b861c06 --- /dev/null +++ b/models/streamline/bronze/core/bronze__receipts_fr.sql @@ -0,0 +1,38 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_receipts'] +) }} + +SELECT + partition_key, + block_number, + array_index, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__receipts_fr_v2') }} + + {% if var('GLOBAL_USES_STREAMLINE_V1',false) %} + UNION ALL + SELECT + _partition_by_block_id AS partition_key, + block_number, + COALESCE( + VALUE :"array_index" :: INT, + TRY_TO_NUMBER( + utils.udf_hex_to_int( + VALUE :"data" :"transactionIndex" :: STRING + ) + ) + ) AS array_index, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp + FROM + {{ ref('bronze__receipts_fr_v1') }} + {% endif %} diff --git a/models/streamline/bronze/core/bronze__receipts_fr_v1.sql b/models/streamline/bronze/core/bronze__receipts_fr_v1.sql new file mode 100644 index 00000000..6c4956e2 --- /dev/null +++ b/models/streamline/bronze/core/bronze__receipts_fr_v1.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'RECEIPTS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = 
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %} +{% set partition_join_key = '_partition_by_block_id' %} +{% set balances = default_vars['balances'] %} +{% set block_number = false %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core_streamline_v1','bronze_receipts'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__receipts_fr_v2.sql b/models/streamline/bronze/core/bronze__receipts_fr_v2.sql new file mode 100644 index 00000000..90e0a071 --- /dev/null +++ b/models/streamline/bronze/core/bronze__receipts_fr_v2.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'RECEIPTS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_receipts'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__streamline_blocks.sql b/models/streamline/bronze/core/bronze__streamline_blocks.sql deleted file mode 100644 index 22b0c51e..00000000 --- a/models/streamline/bronze/core/bronze__streamline_blocks.sql +++ /dev/null @@ -1,11 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{% set model = this.identifier.split("_") [-1] %} -{{ streamline_external_table_query( - model, - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/core/bronze__streamline_confirm_blocks.sql b/models/streamline/bronze/core/bronze__streamline_confirm_blocks.sql deleted file mode 100644 index af451897..00000000 --- 
a/models/streamline/bronze/core/bronze__streamline_confirm_blocks.sql +++ /dev/null @@ -1,9 +0,0 @@ -{{ config ( - materialized = 'view' -) }} -{{ streamline_external_table_query( - model = "confirm_blocks", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/core/bronze__streamline_receipts.sql b/models/streamline/bronze/core/bronze__streamline_receipts.sql deleted file mode 100644 index 7fad1eaa..00000000 --- a/models/streamline/bronze/core/bronze__streamline_receipts.sql +++ /dev/null @@ -1,11 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{% set model = this.identifier.split("_") [-1] %} -{{ streamline_external_table_query( - model, - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/core/bronze__streamline_traces.sql b/models/streamline/bronze/core/bronze__streamline_traces.sql deleted file mode 100644 index 1423eb97..00000000 --- a/models/streamline/bronze/core/bronze__streamline_traces.sql +++ /dev/null @@ -1,10 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{{ streamline_external_table_query( - model = "debug_traceBlockByNumber", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/core/bronze__streamline_transactions.sql b/models/streamline/bronze/core/bronze__streamline_transactions.sql deleted file mode 100644 index da8e3755..00000000 --- a/models/streamline/bronze/core/bronze__streamline_transactions.sql +++ /dev/null @@ -1,11 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{% set model = this.identifier.split("_") [-1] %} -{{ streamline_external_table_query( - model, - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/core/bronze__traces.sql b/models/streamline/bronze/core/bronze__traces.sql new file mode 100644 index 00000000..f8661762 --- /dev/null +++ b/models/streamline/bronze/core/bronze__traces.sql @@ -0,0 +1,39 @@ +{# Set variables #} +{% set source_name = 'TRACES' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = 
source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__traces_fr.sql b/models/streamline/bronze/core/bronze__traces_fr.sql new file mode 100644 index 00000000..b150c368 --- /dev/null +++ b/models/streamline/bronze/core/bronze__traces_fr.sql @@ -0,0 +1,30 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +SELECT + partition_key, + block_number, + array_index, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__traces_fr_v2') }} +{% if var('GLOBAL_USES_STREAMLINE_V1', false) %} +UNION ALL +SELECT + _partition_by_block_id AS partition_key, + block_number, + VALUE :"array_index" :: INT AS array_index, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__traces_fr_v1') }} +{% endif %} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__traces_fr_v1.sql b/models/streamline/bronze/core/bronze__traces_fr_v1.sql new file mode 100644 index 00000000..3905c1b8 --- /dev/null +++ b/models/streamline/bronze/core/bronze__traces_fr_v1.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'DEBUG_TRACEBLOCKBYNUMBER' if var('GLOBAL_USES_SINGLE_FLIGHT_METHOD',false) else 'TRACES' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %} +{% set partition_join_key = '_partition_by_block_id' %} +{% set balances = default_vars['balances'] %} +{% set block_number = false %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core_streamline_v1'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} diff --git a/models/streamline/bronze/core/bronze__traces_fr_v2.sql b/models/streamline/bronze/core/bronze__traces_fr_v2.sql new file mode 100644 index 00000000..04158254 --- /dev/null +++ b/models/streamline/bronze/core/bronze__traces_fr_v2.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'TRACES' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = 
source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__transactions.sql b/models/streamline/bronze/core/bronze__transactions.sql new file mode 100644 index 00000000..8b6ba90c --- /dev/null +++ b/models/streamline/bronze/core/bronze__transactions.sql @@ -0,0 +1,39 @@ +{# Set variables #} +{% set source_name = 'TRANSACTIONS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) or var('GLOBAL_USES_BLOCKS_TRANSACTIONS_PATH', false) else '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__transactions_fr.sql b/models/streamline/bronze/core/bronze__transactions_fr.sql new file mode 100644 index 00000000..2721f1d7 --- /dev/null +++ b/models/streamline/bronze/core/bronze__transactions_fr.sql @@ -0,0 +1,41 @@ +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +SELECT + partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__transactions_fr_v2') }} +{% if var('GLOBAL_USES_STREAMLINE_V1', false) %} +UNION ALL +SELECT + _partition_by_block_id AS partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__transactions_fr_v1') }} +{% endif %} +{% if var('GLOBAL_USES_BLOCKS_TRANSACTIONS_PATH', false) %} +UNION ALL +SELECT + partition_key, + block_number, + VALUE, + DATA, + metadata, + file_name, + _inserted_timestamp +FROM + {{ ref('bronze__transactions_fr_v2_1') }} +{% endif %} \ No newline at end of file diff --git a/models/streamline/bronze/core/bronze__transactions_fr_v1.sql b/models/streamline/bronze/core/bronze__transactions_fr_v1.sql new file mode 100644 index 
00000000..41179ac7 --- /dev/null +++ b/models/streamline/bronze/core/bronze__transactions_fr_v1.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'TRANSACTIONS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)" %} +{% set partition_join_key = '_partition_by_block_id' %} +{% set balances = default_vars['balances'] %} +{% set block_number = false %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core_streamline_v1'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} diff --git a/models/streamline/bronze/core/bronze__transactions_fr_v2.sql b/models/streamline/bronze/core/bronze__transactions_fr_v2.sql new file mode 100644 index 00000000..7907074d --- /dev/null +++ b/models/streamline/bronze/core/bronze__transactions_fr_v2.sql @@ -0,0 +1,40 @@ +{# Set variables #} +{% set source_name = 'TRANSACTIONS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{% set partition_function = default_vars['partition_function'] %} +{% set partition_join_key = default_vars['partition_join_key'] %} +{% set balances = default_vars['balances'] %} +{% set block_number = default_vars['block_number'] %} +{% set uses_receipts_by_hash = default_vars['uses_receipts_by_hash'] %} + +{# Log configuration details #} +{{ log_bronze_details( + source_name = source_name, + source_version = source_version, + model_type = model_type, + partition_function = partition_function, + partition_join_key = partition_join_key, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_core'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_fr( + source_name = source_name.lower(), + source_version = source_version.lower(), + partition_function = partition_function, + partition_join_key = partition_join_key, + balances = balances, + block_number = block_number, + uses_receipts_by_hash = uses_receipts_by_hash +) }} \ No newline at end of file diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_blocks.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_blocks.sql deleted file mode 100644 index 33cf91be..00000000 --- a/models/streamline/bronze/core/fr/bronze__streamline_fr_blocks.sql +++ /dev/null @@ -1,11 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{% set model = this.identifier.split("_") [-1] %} -{{ streamline_external_table_fr_query( - model, - partition_function = 
"CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_confirm_blocks.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_confirm_blocks.sql deleted file mode 100644 index bee43e7b..00000000 --- a/models/streamline/bronze/core/fr/bronze__streamline_fr_confirm_blocks.sql +++ /dev/null @@ -1,9 +0,0 @@ -{{ config ( - materialized = 'view' -) }} -{{ streamline_external_table_fr_query( - model = "confirm_blocks", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER )", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_receipts.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_receipts.sql deleted file mode 100644 index c930f316..00000000 --- a/models/streamline/bronze/core/fr/bronze__streamline_fr_receipts.sql +++ /dev/null @@ -1,11 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{% set model = this.identifier.split("_") [-1] %} -{{ streamline_external_table_fr_query( - model, - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_traces.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_traces.sql deleted file mode 100644 index d97be186..00000000 --- a/models/streamline/bronze/core/fr/bronze__streamline_fr_traces.sql +++ /dev/null @@ -1,9 +0,0 @@ -{{ config ( - materialized = 'view' -) }} -{{ streamline_external_table_fr_query( - model = "debug_traceBlockByNumber", - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/core/fr/bronze__streamline_fr_transactions.sql b/models/streamline/bronze/core/fr/bronze__streamline_fr_transactions.sql deleted file mode 100644 index c930f316..00000000 --- a/models/streamline/bronze/core/fr/bronze__streamline_fr_transactions.sql +++ /dev/null @@ -1,11 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -{% set model = this.identifier.split("_") [-1] %} -{{ streamline_external_table_fr_query( - model, - partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )", - partition_name = "_partition_by_block_id", - unique_key = "block_number" -) }} diff --git a/models/streamline/bronze/decoder/bronze__decoded_logs.sql b/models/streamline/bronze/decoder/bronze__decoded_logs.sql index bd43f6f8..8a339c61 100644 --- a/models/streamline/bronze/decoder/bronze__decoded_logs.sql +++ b/models/streamline/bronze/decoder/bronze__decoded_logs.sql @@ -1,41 +1,23 @@ -{{ config ( - materialized = 'view' +{# Set variables #} +{% set source_name = 'DECODED_LOGS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = '' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{# Log configuration details #} +{{ log_model_details( + vars = default_vars ) }} -WITH meta AS ( +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_decoded_logs'] +) }} - SELECT - last_modified AS _inserted_timestamp, - file_name, - CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS 
INTEGER) AS _partition_by_block_number, - TO_DATE( - concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5)) - ) AS _partition_by_created_date - FROM - TABLE( - information_schema.external_table_file_registration_history( - start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()), - table_name => '{{ source( "bronze_streamline", "decoded_logs") }}') - ) A - ) - SELECT - block_number, - id :: STRING AS id, - DATA, - _inserted_timestamp, - s._partition_by_block_number AS _partition_by_block_number, - s._partition_by_created_date AS _partition_by_created_date - FROM - {{ source( - "bronze_streamline", - "decoded_logs" - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b._partition_by_block_number = s._partition_by_block_number - AND b._partition_by_created_date = s._partition_by_created_date - WHERE - b._partition_by_block_number = s._partition_by_block_number - AND b._partition_by_created_date = s._partition_by_created_date - AND s._partition_by_created_date >= DATEADD('day', -2, CURRENT_TIMESTAMP()) +{# Main query starts here #} +{{ streamline_external_table_query_decoder( + source_name = source_name.lower(), + source_version = source_version.lower() +) }} \ No newline at end of file diff --git a/models/streamline/bronze/decoder/bronze__decoded_logs_fr.sql b/models/streamline/bronze/decoder/bronze__decoded_logs_fr.sql new file mode 100644 index 00000000..bbe59557 --- /dev/null +++ b/models/streamline/bronze/decoder/bronze__decoded_logs_fr.sql @@ -0,0 +1,20 @@ +{# Log configuration details #} +{{ log_model_details() }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_decoded_logs'] +) }} + +SELECT + * +FROM + {{ ref('bronze__decoded_logs_fr_v2') }} +{% if var('GLOBAL_USES_STREAMLINE_V1', false) %} +UNION ALL +SELECT + * +FROM + {{ ref('bronze__decoded_logs_fr_v1') }} +{% endif %} diff --git a/models/streamline/bronze/decoder/bronze__decoded_logs_fr_v1.sql b/models/streamline/bronze/decoder/bronze__decoded_logs_fr_v1.sql new file mode 100644 index 00000000..8c122fa9 --- /dev/null +++ b/models/streamline/bronze/decoder/bronze__decoded_logs_fr_v1.sql @@ -0,0 +1,23 @@ +{# Set variables #} +{% set source_name = 'DECODED_LOGS' %} +{% set source_version = '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{# Log configuration details #} +{{ log_model_details( + vars = default_vars +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_decoded_logs_streamline_v1'] +) }} + +{# Main query starts here #} +{{ streamline_external_table_query_decoder_fr( + source_name = source_name.lower(), + source_version = source_version.lower() +) }} \ No newline at end of file diff --git a/models/streamline/bronze/decoder/bronze__decoded_logs_fr_v2.sql b/models/streamline/bronze/decoder/bronze__decoded_logs_fr_v2.sql new file mode 100644 index 00000000..2bd430a1 --- /dev/null +++ b/models/streamline/bronze/decoder/bronze__decoded_logs_fr_v2.sql @@ -0,0 +1,23 @@ +{# Set variables #} +{% set source_name = 'DECODED_LOGS' %} +{% set source_version = 'V2' if var('GLOBAL_USES_STREAMLINE_V1', false) else '' %} +{% set model_type = 'FR' %} + +{%- set default_vars = set_default_variables_bronze(source_name, model_type) -%} + +{# Log configuration details #} +{{ log_model_details( + vars = default_vars +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = 'view', + tags = ['bronze_decoded_logs'] 
+) }} + +{# Main query starts here #} +{{ streamline_external_table_query_decoder_fr( + source_name = source_name.lower(), + source_version = source_version.lower() +) }} \ No newline at end of file diff --git a/models/streamline/bronze/decoder/bronze__fr_decoded_logs.sql b/models/streamline/bronze/decoder/bronze__fr_decoded_logs.sql deleted file mode 100644 index 4e4a1c8f..00000000 --- a/models/streamline/bronze/decoder/bronze__fr_decoded_logs.sql +++ /dev/null @@ -1,40 +0,0 @@ -{{ config ( - materialized = 'view' -) }} - -WITH meta AS ( - - SELECT - registered_on AS _inserted_timestamp, - file_name, - CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 6), '_', 1) AS INTEGER) AS _partition_by_block_number, - TO_DATE( - concat_ws('-', SPLIT_PART(file_name, '/', 3), SPLIT_PART(file_name, '/', 4), SPLIT_PART(file_name, '/', 5)) - ) AS _partition_by_created_date - FROM - TABLE( - information_schema.external_table_files( - table_name => '{{ source( "bronze_streamline", "decoded_logs") }}' - ) - ) A -) -SELECT - block_number, - id :: STRING AS id, - DATA, - _inserted_timestamp, - s._partition_by_block_number AS _partition_by_block_number, - s._partition_by_created_date AS _partition_by_created_date -FROM - {{ source( - "bronze_streamline", - "decoded_logs" - ) }} - s - JOIN meta b - ON b.file_name = metadata$filename - AND b._partition_by_block_number = s._partition_by_block_number - AND b._partition_by_created_date = s._partition_by_created_date -WHERE - b._partition_by_block_number = s._partition_by_block_number - AND b._partition_by_created_date = s._partition_by_created_date diff --git a/models/streamline/bronze/streamline-sbx-setup.md b/models/streamline/bronze/streamline-sbx-setup.md deleted file mode 100644 index 630aff7b..00000000 --- a/models/streamline/bronze/streamline-sbx-setup.md +++ /dev/null @@ -1,106 +0,0 @@ -## Sandbox integration setup - -In order to perform a `sandbox` `streamline` integration you need to ![register](../../../macros/streamline/api_integrations.sql) with your `sbx api gateway` endpoint. - -### DBT Global config -- The first step is to configure your `global dbt` profile: - -```zsh -# create dbt global config -touch ~/.dbt/profiles.yaml -``` - -- And add the following into `~/.dbt/profiles.yaml` - -```yaml -optimism: - target: sbx - outputs: - sbx: - type: snowflake - account: vna27887.us-east-1 - role: DBT_CLOUD_OPTIMISM - user: @flipsidecrypto.com - authenticator: externalbrowser - region: us-east-1 - database: OPTIMISM_DEV - warehouse: DBT - schema: STREAMLINE - threads: 12 - client_session_keep_alive: False - query_tag: dbt__dev -``` - -### Create user & role for streamline lambdas to use and apply the appropriate roles - -```sql --- Create OPTIMISM_DEV.streamline schema -CREATE SCHEMA OPTIMISM_DEV.STREAMLINE - -CREATE ROLE AWS_LAMBDA_OPTIMISM_API_SBX; - -CREATE USER AWS_LAMBDA_OPTIMISM_API_SBX PASSWORD='abc123' DEFAULT_ROLE = AWS_LAMBDA_OPTIMISM_API_SBX MUST_CHANGE_PASSWORD = TRUE; - -GRANT SELECT ON ALL VIEWS IN SCHEMA OPTIMISM_DEV.STREAMLINE TO ROLE AWS_LAMBDA_OPTIMISM_API_SBX; - -GRANT ROLE AWS_LAMBDA_OPTIMISM_API TO USER AWS_LAMBDA_OPTIMISM_API; - --- Note that the password must meet Snowflake's password requirements, which include a minimum length of 8 characters, at least one uppercase letter, at least one lowercase letter, and at least one number or special character. 
- -ALTER USER AWS_LAMBDA_OPTIMISM_API_SBX SET PASSWORD = 'new_password'; -``` -### Register Snowflake integration and UDF's - -- Register the ![snowflake api integration](../../../macros/streamline/api_integrations.sql) either manually on `snowsight worksheet` or via `dbt` - -```sql --- Manually run on snowflake -CREATE api integration IF NOT EXISTS aws_optimism_api_sbx_shah api_provider = aws_api_gateway api_aws_role_arn = 'arn:aws:iam::579011195466:role/snowflake-api-optimism' api_allowed_prefixes = ( - 'https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/' - ) enabled = TRUE; -``` - -```zsh -# Use dbt to run create_aws_optimism_api macro -dbt run-operation create_aws_optimism_api --target dev -``` - -- Add the UDF to the ![create udfs macro](/macros/create_udfs.sql) -- Register UDF - -```sql -CREATE -OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns text api_integration = aws_optimism_api_sbx_shah AS 'https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/udf_bulk_json_rpc'; - -CREATE -OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(json variant) returns text api_integration = aws_optimism_api_sbx_shah AS 'https://3ifufl19z4.execute-api.us-east-1.amazonaws.com/sbx/bulk_decode_logs'; - -GRANT USAGE ON FUNCTION streamline.udf_get_chainhead() TO DBT_CLOUD_OPTIMISM; -GRANT USAGE ON FUNCTION streamline.udf_bulk_json_rpc(variantq) TO DBT_CLOUD_OPTIMISM; -GRANT USAGE ON FUNCTION streamline.udtf_get_base_table(integer) TO DBT_CLOUD_OPTIMISM; -``` - -- Add the ![_max_block_by_date.sql](_max_block_by_date.sql) model -- Add the ![streamline__blocks](streamline__blocks.sql) model -- Add the ![get_base_table_udft.sql](../.././macros/streamline/get_base_table_udft.sql) macro - -- Grant privileges to `AWS_LAMBDA_OPTIMISMT_API` - -```sql -GRANT USAGE ON DATABASE OPTIMISM_DEV TO ROLE AWS_LAMBDA_OPTIMISM_API; -GRANT USAGE ON SCHEMA STREAMLINE TO ROLE AWS_LAMBDA_OPTIMISM_API; -GRANT USAGE ON WAREHOUSE DBT_CLOUD TO ROLE AWS_LAMBDA_OPTIMISM_API; -``` - -## Run decode models - -```zsh -# SBX -dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True, "STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES": True}' -m 1+models/silver/streamline/history --profile optimism --target sbx --profiles-dir ~/.dbt - -# DEV -dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True, "STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES": True}' -m 1+models/silver/streamline/history/ --profile optimism --target dev --profiles-dir ~/.dbt - -# PROD -dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/silver/streamline/history/ --profile optimism --target prod --profiles-dir ~/.dbt -``` diff --git a/models/streamline/silver/_max_block_by_hour.sql b/models/streamline/silver/_max_block_by_hour.sql index 332546a6..80e3d972 100644 --- a/models/streamline/silver/_max_block_by_hour.sql +++ b/models/streamline/silver/_max_block_by_hour.sql @@ -15,7 +15,7 @@ WITH base AS ( WHERE block_timestamp > DATEADD( 'day', - -3, + -5, CURRENT_DATE ) GROUP BY diff --git a/models/streamline/silver/core/complete/streamline__blocks_complete.sql b/models/streamline/silver/core/complete/streamline__blocks_complete.sql new file mode 100644 index 00000000..4c8977d8 --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__blocks_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'BLOCKS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on 
equality(block_number)" %} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__complete_confirmed_blocks.sql b/models/streamline/silver/core/complete/streamline__complete_confirmed_blocks.sql deleted file mode 100644 index 7e77b365..00000000 --- a/models/streamline/silver/core/complete/streamline__complete_confirmed_blocks.sql +++ /dev/null @@ -1,29 +0,0 @@ --- depends_on: {{ ref('bronze__streamline_confirm_blocks') }} -{{ config ( - materialized = "incremental", - unique_key = "id", - cluster_by = "ROUND(block_number, -3)", - tags = ['streamline_core_complete'] -) }} - -SELECT - id, - block_number, - _inserted_timestamp -FROM - -{% if is_incremental() %} -{{ ref('bronze__streamline_confirm_blocks') }} -WHERE - _inserted_timestamp >= ( - SELECT - COALESCE(MAX(_inserted_timestamp), '1970-01-01' :: TIMESTAMP) _inserted_timestamp - FROM - {{ this }}) - {% else %} - {{ ref('bronze__streamline_fr_confirm_blocks') }} - {% endif %} - - qualify(ROW_NUMBER() over (PARTITION BY id - ORDER BY - _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__complete_debug_traceBlockByNumber.sql b/models/streamline/silver/core/complete/streamline__complete_debug_traceBlockByNumber.sql deleted file mode 100644 index 1b830c46..00000000 --- a/models/streamline/silver/core/complete/streamline__complete_debug_traceBlockByNumber.sql +++ /dev/null @@ -1,33 +0,0 @@ --- depends_on: {{ ref('bronze__streamline_traces') }} -{{ config ( - materialized = "incremental", - unique_key = "id", - cluster_by = "ROUND(block_number, -3)", - merge_update_columns = ["id"], - post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)", - tags = ['streamline_core_complete'] -) }} - -SELECT - id, - block_number, - file_name, - _inserted_timestamp -FROM - -{% if is_incremental() %} -{{ ref('bronze__streamline_traces') }} -WHERE - _inserted_timestamp >= ( - SELECT - MAX(_inserted_timestamp) _inserted_timestamp - FROM - {{ this }} - ) -{% else %} - {{ ref('bronze__streamline_fr_traces') }} -{% endif %} - -qualify(ROW_NUMBER() over (PARTITION BY id -ORDER BY - _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__complete_qn_getBlockWithReceipts.sql b/models/streamline/silver/core/complete/streamline__complete_qn_getBlockWithReceipts.sql deleted file mode 100644 index 66de36ac..00000000 
--- a/models/streamline/silver/core/complete/streamline__complete_qn_getBlockWithReceipts.sql +++ /dev/null @@ -1,32 +0,0 @@ --- depends_on: {{ ref('bronze__streamline_blocks') }} -{{ config ( - materialized = "incremental", - unique_key = "id", - cluster_by = "ROUND(block_number, -3)", - merge_update_columns = ["id"], - post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)", - tags = ['streamline_core_complete'] -) }} - -SELECT - id, - block_number, - _inserted_timestamp -FROM - -{% if is_incremental() %} -{{ ref('bronze__streamline_blocks') }} -WHERE - _inserted_timestamp >= ( - SELECT - MAX(_inserted_timestamp) _inserted_timestamp - FROM - {{ this }} - ) -{% else %} - {{ ref('bronze__streamline_fr_blocks') }} -{% endif %} - -qualify(ROW_NUMBER() over (PARTITION BY id -ORDER BY - _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__confirm_blocks_complete.sql b/models/streamline/silver/core/complete/streamline__confirm_blocks_complete.sql new file mode 100644 index 00000000..fe73ef27 --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__confirm_blocks_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'CONFIRM_BLOCKS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete_confirm_blocks'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__receipts_complete.sql b/models/streamline/silver/core/complete/streamline__receipts_complete.sql new file mode 100644 index 00000000..b77138da --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__receipts_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'RECEIPTS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + 
unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete_receipts'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/core/complete/streamline__traces_complete.sql b/models/streamline/silver/core/complete/streamline__traces_complete.sql new file mode 100644 index 00000000..e7158769 --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__traces_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'TRACES' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)" %} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/streamline/silver/core/complete/streamline__transactions_complete.sql b/models/streamline/silver/core/complete/streamline__transactions_complete.sql new file mode 100644 index 00000000..06ec7f76 --- /dev/null +++ b/models/streamline/silver/core/complete/streamline__transactions_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'TRANSACTIONS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)"%} + +{# Log configuration details #} +{{ log_complete_details( + post_hook = post_hook, + full_refresh_type = full_refresh_type +) }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ 
config ( + materialized = "incremental", + unique_key = "block_number", + cluster_by = "ROUND(block_number, -3)", + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_core_complete'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + {{ dbt_utils.generate_surrogate_key(['block_number']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY block_number ORDER BY _inserted_timestamp DESC)) = 1 \ No newline at end of file diff --git a/models/streamline/silver/core/history/streamline__blocks_transactions_history.sql b/models/streamline/silver/core/history/streamline__blocks_transactions_history.sql new file mode 100644 index 00000000..baed1d72 --- /dev/null +++ b/models/streamline/silver/core/history/streamline__blocks_transactions_history.sql @@ -0,0 +1,112 @@ +{# Set variables #} +{%- set model_name = 'BLOCKS_TRANSACTIONS' -%} +{%- set model_type = 'HISTORY' -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} + +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_history'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} + + EXCEPT + + SELECT block_number + FROM {{ ref("streamline__blocks_complete") }} b + INNER JOIN {{ ref("streamline__transactions_complete") }} t USING(block_number) + WHERE 1=1 + {% if not new_build %} + AND 
block_number <= (SELECT block_number FROM last_3_days) + {% endif %} +), +ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/history/streamline__confirm_blocks_history.sql b/models/streamline/silver/core/history/streamline__confirm_blocks_history.sql new file mode 100644 index 00000000..d715b311 --- /dev/null +++ b/models/streamline/silver/core/history/streamline__confirm_blocks_history.sql @@ -0,0 +1,133 @@ +{# Set variables #} +{%- set model_name = 'CONFIRM_BLOCKS' -%} +{%- set model_type = 'HISTORY' -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_history_confirm_blocks'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Delay blocks #} +look_back AS ( + SELECT + block_number + FROM + {{ ref("_max_block_by_hour") }} + qualify ROW_NUMBER() over ( + ORDER BY + block_number DESC + ) = 6 + ), + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + AND block_number <= (SELECT block_number FROM look_back) + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} + + EXCEPT + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + AND block_number IS NOT 
NULL + AND block_number <= (SELECT block_number FROM look_back) + AND _inserted_timestamp >= DATEADD( + 'day', + -4, + SYSDATE() + ) + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} +) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/history/streamline__debug_traceBlockByNumber_history.sql b/models/streamline/silver/core/history/streamline__debug_traceBlockByNumber_history.sql deleted file mode 100644 index fa9a8213..00000000 --- a/models/streamline/silver/core/history/streamline__debug_traceBlockByNumber_history.sql +++ /dev/null @@ -1,48 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = if_data_call_function( - func = "{{this.schema}}.udf_bulk_get_traces(object_construct('sql_source', '{{this.identifier}}','exploded_key','[\"result\"]', 'method', 'debug_traceBlockByNumber', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','12960000')}}, 'producer_batch_size', {{var('producer_batch_size','920000')}}, 'worker_batch_size', {{var('worker_batch_size','230000')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))", - target = "{{this.schema}}.{{this.identifier}}" - ), - tags = ['streamline_core_history'] -) }} - -WITH blocks AS ( - - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_debug_traceBlockByNumber") }} -) -SELECT - PARSE_JSON( - CONCAT( - '{"jsonrpc": "2.0",', - '"method": "debug_traceBlockByNumber", "params":["', - REPLACE( - concat_ws( - '', - '0x', - to_char( - block_number :: INTEGER, - 'XXXXXXXX' - ) - ), - ' ', - '' - ), - '",{"tracer": "callTracer","timeout": "30s"}', - '],"id":"', - block_number :: INTEGER, - '"}' - ) - ) AS request -FROM - blocks -ORDER BY - block_number ASC diff --git a/models/streamline/silver/core/history/streamline__qn_getBlockWithReceipts_history.sql b/models/streamline/silver/core/history/streamline__qn_getBlockWithReceipts_history.sql deleted file mode 100644 index 5b907c64..00000000 --- a/models/streamline/silver/core/history/streamline__qn_getBlockWithReceipts_history.sql +++ /dev/null @@ -1,47 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = if_data_call_function( - func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','200000')}}, 'producer_batch_size', {{var('producer_batch_size','3500')}}, 'worker_batch_size', {{var('worker_batch_size','3500')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}))", - target = "{{this.schema}}.{{this.identifier}}" - ), - tags = ['streamline_core_history'] -) }} - -WITH blocks AS ( - - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - EXCEPT - SELECT - block_number - FROM - {{ 
ref("streamline__complete_qn_getBlockWithReceipts") }} -) -SELECT - PARSE_JSON( - CONCAT( - '{"jsonrpc": "2.0",', - '"method": "qn_getBlockWithReceipts", "params":["', - REPLACE( - concat_ws( - '', - '0x', - to_char( - block_number :: INTEGER, - 'XXXXXXXX' - ) - ), - ' ', - '' - ), - '"],"id":"', - block_number :: INTEGER, - '"}' - ) - ) AS request -FROM - blocks -ORDER BY - block_number ASC diff --git a/models/streamline/silver/core/history/streamline__receipts_history.sql b/models/streamline/silver/core/history/streamline__receipts_history.sql new file mode 100644 index 00000000..4cd5823e --- /dev/null +++ b/models/streamline/silver/core/history/streamline__receipts_history.sql @@ -0,0 +1,113 @@ +{# Set variables #} +{%- set model_name = 'RECEIPTS' -%} +{%- set model_type = 'HISTORY' -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_history_receipts'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} + + EXCEPT + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} +) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ 
method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/history/streamline__traces_history.sql b/models/streamline/silver/core/history/streamline__traces_history.sql new file mode 100644 index 00000000..0cf5cc27 --- /dev/null +++ b/models/streamline/silver/core/history/streamline__traces_history.sql @@ -0,0 +1,113 @@ +{# Set variables #} +{%- set model_name = 'TRACES' -%} +{%- set model_type = 'HISTORY' -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_history'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} + + EXCEPT + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + {% if not new_build %} + AND block_number <= (SELECT block_number FROM last_3_days) + {% endif %} +) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git 
a/models/streamline/silver/core/realtime/streamline__blocks_transactions_realtime.sql b/models/streamline/silver/core/realtime/streamline__blocks_transactions_realtime.sql new file mode 100644 index 00000000..0bd0a93f --- /dev/null +++ b/models/streamline/silver/core/realtime/streamline__blocks_transactions_realtime.sql @@ -0,0 +1,126 @@ +{# Set variables #} +{%- set model_name = 'BLOCKS_TRANSACTIONS' -%} +{%- set model_type = 'REALTIME' -%} +{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method, + min_block=min_block +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_realtime'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} + + {% if min_block is not none %} + AND block_number >= {{ min_block }} + {% endif %} + + EXCEPT + + SELECT block_number + FROM {{ ref("streamline__blocks_complete") }} b + INNER JOIN {{ ref("streamline__transactions_complete") }} t USING(block_number) + WHERE 1=1 + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} +), +ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if not new_build %} + UNION + SELECT block_number + FROM {{ ref("_unconfirmed_blocks") }} + UNION + SELECT block_number + FROM {{ ref("_missing_txs") }} + {% endif %} + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ 
node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/realtime/streamline__confirm_blocks_realtime.sql b/models/streamline/silver/core/realtime/streamline__confirm_blocks_realtime.sql index 54f534a5..6135a3fc 100644 --- a/models/streamline/silver/core/realtime/streamline__confirm_blocks_realtime.sql +++ b/models/streamline/silver/core/realtime/streamline__confirm_blocks_realtime.sql @@ -1,19 +1,63 @@ -{{ config ( - materialized = "view", - post_hook = if_data_call_function( - func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'confirm_blocks', 'sql_limit', {{var('sql_limit','1000000')}}, 'producer_batch_size', {{var('producer_batch_size','100000')}}, 'worker_batch_size', {{var('worker_batch_size','1000')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}))", - target = "{{this.schema}}.{{this.identifier}}" - ), - tags = ['streamline_core_realtime'] +{# Set variables #} +{%- set model_name = 'CONFIRM_BLOCKS' -%} +{%- set model_type = 'REALTIME' -%} +{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method, + min_block=min_block ) }} -WITH last_3_days AS ( +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_realtime_confirm_blocks'] +) }} - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Delay blocks #} look_back AS ( SELECT block_number @@ -23,76 +67,72 @@ look_back AS ( ORDER BY block_number DESC ) = 6 -), -tbl AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE + ), + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE block_number IS NOT NULL - AND block_number <= ( - SELECT - block_number - FROM - look_back - ) - AND block_number >= ( - SELECT - block_number - FROM - last_3_days - ) + AND block_number <= (SELECT 
block_number FROM look_back) + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} + {% if min_block is not none %} + AND block_number >= {{ min_block }} + {% endif %} + EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_confirmed_blocks") }} - WHERE - block_number IS NOT NULL - AND block_number <= ( - SELECT - block_number - FROM - look_back - ) + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + AND block_number IS NOT NULL + AND block_number <= (SELECT block_number FROM look_back) AND _inserted_timestamp >= DATEADD( 'day', -4, SYSDATE() ) - AND block_number >= ( - SELECT - block_number - FROM - last_3_days - ) + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} ) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} SELECT - PARSE_JSON( - CONCAT( - '{"jsonrpc": "2.0",', - '"method": "eth_getBlockByNumber", "params":["', - REPLACE( - concat_ws( - '', - '0x', - to_char( - block_number :: INTEGER, - 'XXXXXXXX' - ) - ), - ' ', - '' - ), - '", false],"id":"', - block_number :: INTEGER, - '"}' - ) + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' ) AS request FROM - tbl -ORDER BY - block_number ASC -LIMIT - 36000 + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/realtime/streamline__debug_traceBlockByNumber_realtime.sql b/models/streamline/silver/core/realtime/streamline__debug_traceBlockByNumber_realtime.sql deleted file mode 100644 index 1230ccd2..00000000 --- a/models/streamline/silver/core/realtime/streamline__debug_traceBlockByNumber_realtime.sql +++ /dev/null @@ -1,100 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = if_data_call_function( - func = "{{this.schema}}.udf_bulk_get_traces(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','50000')}}, 'producer_batch_size', {{var('producer_batch_size','15000')}}, 'worker_batch_size', {{var('worker_batch_size','1500')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))", - target = "{{this.schema}}.{{this.identifier}}" - ), - tags = ['streamline_core_realtime'] -) }} - -WITH last_3_days AS ( - - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), -blocks AS ( - SELECT - block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - ( - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - ) - EXCEPT - SELECT - block_number - FROM - {{ ref("streamline__complete_debug_traceBlockByNumber") }} - WHERE - ( - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - ) - AND _inserted_timestamp >= DATEADD( - 'day', - -4, - SYSDATE() - ) -), -all_blocks AS ( - SELECT - block_number - FROM - blocks - UNION - SELECT - block_number - FROM - ( - SELECT - block_number - FROM - {{ ref("_missing_traces") }} - UNION - SELECT 
- block_number - FROM - {{ ref("_unconfirmed_blocks") }} - ) -) -SELECT - PARSE_JSON( - CONCAT( - '{"jsonrpc": "2.0",', - '"method": "debug_traceBlockByNumber", "params":["', - REPLACE( - concat_ws( - '', - '0x', - to_char( - block_number :: INTEGER, - 'XXXXXXXX' - ) - ), - ' ', - '' - ), - '",{"tracer": "callTracer","timeout": "30s"}', - '],"id":"', - block_number :: INTEGER, - '"}' - ) - ) AS request -FROM - all_blocks -ORDER BY - block_number ASC -LIMIT - 7500 diff --git a/models/streamline/silver/core/realtime/streamline__qn_getBlockWithReceipts_realtime.sql b/models/streamline/silver/core/realtime/streamline__qn_getBlockWithReceipts_realtime.sql deleted file mode 100644 index fe7b6fbd..00000000 --- a/models/streamline/silver/core/realtime/streamline__qn_getBlockWithReceipts_realtime.sql +++ /dev/null @@ -1,104 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = if_data_call_function( - func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','80000')}}, 'producer_batch_size', {{var('producer_batch_size','40000')}}, 'worker_batch_size', {{var('worker_batch_size','40000')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}))", - target = "{{this.schema}}.{{this.identifier}}" - ), - tags = ['streamline_core_realtime'] -) }} - -WITH last_3_days AS ( - - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -), -blocks AS ( - SELECT - block_number :: STRING AS block_number - FROM - {{ ref("streamline__blocks") }} - WHERE - ( - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - ) - EXCEPT - SELECT - block_number :: STRING - FROM - {{ ref("streamline__complete_qn_getBlockWithReceipts") }} - WHERE - ( - block_number >= ( - SELECT - block_number - FROM - last_3_days - ) - ) - AND _inserted_timestamp >= DATEADD( - 'day', - -4, - SYSDATE() - ) -), -all_blocks AS ( - SELECT - block_number - FROM - blocks - UNION - SELECT - block_number - FROM - ( - SELECT - block_number - FROM - {{ ref("_missing_receipts") }} - UNION - SELECT - block_number - FROM - {{ ref("_missing_txs") }} - UNION - SELECT - block_number - FROM - {{ ref("_unconfirmed_blocks") }} - ) -) -SELECT - PARSE_JSON( - CONCAT( - '{"jsonrpc": "2.0",', - '"method": "qn_getBlockWithReceipts", "params":["', - REPLACE( - concat_ws( - '', - '0x', - to_char( - block_number :: INTEGER, - 'XXXXXXXX' - ) - ), - ' ', - '' - ), - '"],"id":"', - block_number :: INTEGER, - '"}' - ) - ) AS request -FROM - all_blocks -ORDER BY - block_number ASC -LIMIT - 36000 diff --git a/models/streamline/silver/core/realtime/streamline__receipts_realtime.sql b/models/streamline/silver/core/realtime/streamline__receipts_realtime.sql new file mode 100644 index 00000000..6ec5c850 --- /dev/null +++ b/models/streamline/silver/core/realtime/streamline__receipts_realtime.sql @@ -0,0 +1,130 @@ +{# Set variables #} +{%- set model_name = 'RECEIPTS' -%} +{%- set model_type = 'REALTIME' -%} +{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. 
These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method, + min_block=min_block +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_realtime_receipts'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} + {% if min_block is not none %} + AND block_number >= {{ min_block }} + {% endif %} + + EXCEPT + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} +) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if not new_build %} + UNION + SELECT block_number + FROM {{ ref("_unconfirmed_blocks") }} + UNION + SELECT block_number + FROM {{ ref("_missing_txs") }} + UNION + SELECT block_number + FROM {{ ref("_missing_receipts") }} + {% endif %} + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/realtime/streamline__traces_realtime.sql b/models/streamline/silver/core/realtime/streamline__traces_realtime.sql new file mode 100644 index 00000000..34f7f984 --- /dev/null +++ b/models/streamline/silver/core/realtime/streamline__traces_realtime.sql @@ -0,0 +1,127 @@ +{# Set variables #} +{%- set model_name = 'TRACES' -%} +{%- set model_type = 'REALTIME' 
-%} +{%- set min_block = var('GLOBAL_START_UP_BLOCK', none) -%} + +{%- set default_vars = set_default_variables_streamline(model_name, model_type) -%} + +{# Set up parameters for the streamline process. These will come from the vars set in dbt_project.yml #} +{%- set streamline_params = set_streamline_parameters( + model_name=model_name, + model_type=model_type +) -%} + +{%- set node_url = default_vars['node_url'] -%} +{%- set node_secret_path = default_vars['node_secret_path'] -%} +{%- set model_quantum_state = default_vars['model_quantum_state'] -%} +{%- set sql_limit = streamline_params['sql_limit'] -%} +{%- set testing_limit = default_vars['testing_limit'] -%} +{%- set order_by_clause = default_vars['order_by_clause'] -%} +{%- set new_build = default_vars['new_build'] -%} +{%- set method_params = streamline_params['method_params'] -%} +{%- set method = streamline_params['method'] -%} + +{# Log configuration details #} +{{ log_streamline_details( + model_name=model_name, + model_type=model_type, + node_url=node_url, + model_quantum_state=model_quantum_state, + sql_limit=sql_limit, + testing_limit=testing_limit, + order_by_clause=order_by_clause, + new_build=new_build, + streamline_params=streamline_params, + method_params=method_params, + method=method, + min_block=min_block +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_rest_api_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = streamline_params + ), + tags = ['streamline_core_realtime'] +) }} + +{# Main query starts here #} +WITH +{% if not new_build %} + last_3_days AS ( + SELECT block_number + FROM {{ ref("_block_lookback") }} + ), +{% endif %} + +{# Identify blocks that need processing #} +to_do AS ( + SELECT block_number + FROM {{ ref("streamline__blocks") }} + WHERE + block_number IS NOT NULL + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} + {% if min_block is not none %} + AND block_number >= {{ min_block }} + {% endif %} + + EXCEPT + + {# Exclude blocks that have already been processed #} + SELECT block_number + FROM {{ ref('streamline__' ~ model_name.lower() ~ '_complete') }} + WHERE 1=1 + {% if not new_build %} + AND block_number >= (SELECT block_number FROM last_3_days) + {% endif %} +) + +{# Prepare the final list of blocks to process #} +,ready_blocks AS ( + SELECT block_number + FROM to_do + + {% if not new_build %} + UNION + SELECT block_number + FROM {{ ref("_unconfirmed_blocks") }} + UNION + SELECT block_number + FROM {{ ref("_missing_traces") }} + {% endif %} + + {% if testing_limit is not none %} + LIMIT {{ testing_limit }} + {% endif %} +) + +{# Generate API requests for each block #} +SELECT + block_number, + ROUND(block_number, -3) AS partition_key, + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', block_number, + 'jsonrpc', '2.0', + 'method', '{{ method }}', + 'params', {{ method_params }} + ), + '{{ node_secret_path }}' + ) AS request +FROM + ready_blocks + +{{ order_by_clause }} + +LIMIT {{ sql_limit }} \ No newline at end of file diff --git a/models/streamline/silver/core/retry/_missing_traces.sql b/models/streamline/silver/core/retry/_missing_traces.sql index f4ea89d7..346f556c 100644 --- a/models/streamline/silver/core/retry/_missing_traces.sql +++ 
b/models/streamline/silver/core/retry/_missing_traces.sql @@ -2,30 +2,16 @@ materialized = "ephemeral" ) }} -WITH lookback AS ( - - SELECT - block_number - FROM - {{ ref("_block_lookback") }} -) SELECT - DISTINCT tx.block_number block_number + DISTINCT tx.block_number FROM - {{ ref("silver__transactions") }} + {{ ref("test_silver__transactions_recent") }} tx - LEFT JOIN {{ ref("core__fact_traces") }} - tr - ON tx.block_number = tr.block_number - AND tx.tx_hash = tr.tx_hash -WHERE - tx.block_timestamp >= DATEADD('hour', -84, SYSDATE()) - AND tr.tx_hash IS NULL - AND tx.block_number >= ( - SELECT - block_number - FROM - lookback + LEFT JOIN {{ ref("test_gold__fact_traces_recent") }} + tr USING ( + block_number, + tx_hash ) - AND tr.block_timestamp >= DATEADD('hour', -84, SYSDATE()) - AND tr.block_timestamp IS NOT NULL +WHERE + tr.tx_hash IS NULL + AND tx.block_timestamp > DATEADD('day', -5, SYSDATE()) diff --git a/models/streamline/silver/core/streamline__blocks.sql b/models/streamline/silver/core/streamline__blocks.sql index 4ffb73e4..9e56c764 100644 --- a/models/streamline/silver/core/streamline__blocks.sql +++ b/models/streamline/silver/core/streamline__blocks.sql @@ -1,20 +1,36 @@ +{%- if flags.WHICH == 'compile' and execute -%} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%} + {% set config_log = config_log ~ '\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} +{%- endif -%} + {{ config ( materialized = "view", tags = ['streamline_core_complete'] ) }} -{% if execute %} - {% set height = run_query('SELECT streamline.udf_get_chainhead()') %} - {% set block_height = height.columns [0].values() [0] %} -{% else %} - {% set block_height = 0 %} -{% endif %} - SELECT - _id AS block_number + _id, + ( + ({{ var('GLOBAL_BLOCKS_PER_HOUR',0) }} / 60) * {{ var('GLOBAL_CHAINHEAD_DELAY',3) }} + ) :: INT AS block_number_delay, --minute-based block delay + (_id - block_number_delay) :: INT AS block_number, + utils.udf_int_to_hex(block_number) AS block_number_hex FROM - {{ ref("silver__number_sequence") }} + {{ ref('silver__number_sequence') }} WHERE - _id <= {{ block_height }} -ORDER BY - _id ASC + _id <= ( + SELECT + COALESCE( + block_number, + 0 + ) + FROM + {{ ref("streamline__get_chainhead") }} + ) \ No newline at end of file diff --git a/models/streamline/silver/core/streamline__get_chainhead.sql b/models/streamline/silver/core/streamline__get_chainhead.sql new file mode 100644 index 00000000..9e98ae7b --- /dev/null +++ b/models/streamline/silver/core/streamline__get_chainhead.sql @@ -0,0 +1,54 @@ +{%- set model_quantum_state = var('CHAINHEAD_QUANTUM_STATE', 'livequery') -%} + +{%- set node_url = var('GLOBAL_NODE_URL', '{Service}/{Authentication}') -%} + +{%- if flags.WHICH == 'compile' and execute -%} + + {{ log("=== Current Variable Settings ===", info=True) }} + {{ log("CHAINHEAD_QUANTUM_STATE: " ~ model_quantum_state, info=True) }} + {{ log("", info=True) }} + + {{ log("=== API Details ===", info=True) }} + {{ log("NODE_URL: " ~ node_url, info=True) }} + {{ log("NODE_SECRET_PATH: " ~ var('GLOBAL_NODE_SECRET_PATH'), info=True) }} + {{ log("", info=True) }} + + {% set config_log = '\n' %} + {% set config_log = config_log ~ '\n=== DBT Model Config ===\n'%} + {% set config_log = config_log ~ 
'\n{{ config (\n' %} + {% set config_log = config_log ~ ' materialized = "' ~ config.get('materialized') ~ '",\n' %} + {% set config_log = config_log ~ ' tags = ' ~ config.get('tags') | tojson ~ '\n' %} + {% set config_log = config_log ~ ') }}\n' %} + {{ log(config_log, info=True) }} + {{ log("", info=True) }} + +{%- endif -%} + +{{ config ( + materialized = 'table', + tags = ['streamline_core_complete','chainhead'] +) }} + +SELECT + live.udf_api( + 'POST', + '{{ node_url }}', + OBJECT_CONSTRUCT( + 'Content-Type', 'application/json', + 'fsc-quantum-state', '{{ model_quantum_state }}' + ), + OBJECT_CONSTRUCT( + 'id', + 0, + 'jsonrpc', + '2.0', + 'method', + 'eth_blockNumber', + 'params', + [] + ), + '{{ var('GLOBAL_NODE_SECRET_PATH') }}' + ) AS resp, + utils.udf_hex_to_int( + resp :data :result :: STRING + ) AS block_number \ No newline at end of file diff --git a/models/streamline/silver/core/streamline__get_chainhead.yml b/models/streamline/silver/core/streamline__get_chainhead.yml new file mode 100644 index 00000000..e318e4db --- /dev/null +++ b/models/streamline/silver/core/streamline__get_chainhead.yml @@ -0,0 +1,9 @@ +version: 2 +models: + - name: streamline__get_chainhead + description: "This model is used to get the chainhead from the blockchain." + + columns: + - name: BLOCK_NUMBER + tests: + - not_null \ No newline at end of file diff --git a/models/streamline/silver/decoded_logs/complete/streamline__decoded_logs_complete.sql b/models/streamline/silver/decoded_logs/complete/streamline__decoded_logs_complete.sql new file mode 100644 index 00000000..3e805898 --- /dev/null +++ b/models/streamline/silver/decoded_logs/complete/streamline__decoded_logs_complete.sql @@ -0,0 +1,50 @@ +{# Set variables #} +{%- set source_name = 'DECODED_LOGS' -%} +{%- set model_type = 'COMPLETE' -%} + +{%- set full_refresh_type = var((source_name ~ '_complete_full_refresh').upper(), false) -%} + +{% set post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(_log_id)" %} + +{# Log configuration details #} +{{ log_model_details() }} + +{# Set up dbt configuration #} +-- depends_on: {{ ref('bronze__' ~ source_name.lower()) }} + +{{ config ( + materialized = "incremental", + unique_key = "_log_id", + cluster_by = "ROUND(block_number, -3)", + incremental_predicates = ["dynamic_range", "block_number"], + merge_update_columns = ["_log_id"], + post_hook = post_hook, + full_refresh = full_refresh_type, + tags = ['streamline_decoded_logs_complete'] +) }} + +{# Main query starts here #} +SELECT + block_number, + file_name, + id AS _log_id, + {{ dbt_utils.generate_surrogate_key(['id']) }} AS complete_{{ source_name.lower() }}_id, + SYSDATE() AS inserted_timestamp, + SYSDATE() AS modified_timestamp, + _inserted_timestamp, + '{{ invocation_id }}' AS _invocation_id +FROM + {% if is_incremental() %} + {{ ref('bronze__' ~ source_name.lower()) }} + WHERE + _inserted_timestamp >= ( + SELECT + COALESCE(MAX(_inserted_timestamp), '1970-01-01'::TIMESTAMP) AS _inserted_timestamp + FROM + {{ this }} + ) + {% else %} + {{ ref('bronze__' ~ source_name.lower() ~ '_fr') }} + {% endif %} + +QUALIFY (ROW_NUMBER() OVER (PARTITION BY id ORDER BY _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/decoded_logs/realtime/streamline__decoded_logs_realtime.sql b/models/streamline/silver/decoded_logs/realtime/streamline__decoded_logs_realtime.sql new file mode 100644 index 00000000..a0ff21b9 --- /dev/null +++ b/models/streamline/silver/decoded_logs/realtime/streamline__decoded_logs_realtime.sql @@ -0,0 +1,110 
@@ +{%- set testing_limit = var('DECODED_LOGS_REALTIME_TESTING_LIMIT', none) -%} + +{%- set streamline_params = { + "external_table": var("DECODED_LOGS_REALTIME_EXTERNAL_TABLE", "decoded_logs"), + "sql_limit": var("DECODED_LOGS_REALTIME_SQL_LIMIT", 10000000), + "producer_batch_size": var("DECODED_LOGS_REALTIME_PRODUCER_BATCH_SIZE", 400000), + "worker_batch_size": var("DECODED_LOGS_REALTIME_WORKER_BATCH_SIZE", 200000), + "sql_source": "decoded_logs_realtime" +} -%} + +{# Log configuration details #} +{{ log_model_details( + params = streamline_params +) }} + +{# Set up dbt configuration #} +{{ config ( + materialized = "view", + post_hook = [fsc_utils.if_data_call_function_v2( + func = 'streamline.udf_bulk_decode_logs_v2', + target = "{{this.schema}}.{{this.identifier}}", + params = { + "external_table": streamline_params['external_table'], + "sql_limit": streamline_params['sql_limit'], + "producer_batch_size": streamline_params['producer_batch_size'], + "worker_batch_size": streamline_params['worker_batch_size'], + "sql_source": streamline_params['sql_source'] + } + ), + fsc_utils.if_data_call_wait()], + tags = ['streamline_decoded_logs_realtime'] +) }} + +WITH target_blocks AS ( + SELECT + block_number + FROM + {{ ref('core__fact_blocks') }} + WHERE + block_number >= ( + SELECT + block_number + FROM + {{ ref('_24_hour_lookback') }} + ) +), +existing_logs_to_exclude AS ( + SELECT + _log_id + FROM + {{ ref('streamline__decoded_logs_complete') }} + l + INNER JOIN target_blocks b USING (block_number) + WHERE + l.inserted_timestamp :: DATE >= DATEADD('day', -2, SYSDATE()) +), +candidate_logs AS ( + SELECT + l.block_number, + l.tx_hash, + l.event_index, + l.contract_address, + l.topics, + l.data, + CONCAT( + l.tx_hash :: STRING, + '-', + l.event_index :: STRING + ) AS _log_id + FROM + target_blocks b + INNER JOIN {{ ref('core__fact_event_logs') }} + l USING (block_number) + WHERE + l.tx_succeeded + AND l.inserted_timestamp :: DATE >= DATEADD('day', -2, SYSDATE()) +) +SELECT + l.block_number, + l._log_id, + A.abi, + OBJECT_CONSTRUCT( + 'topics', + l.topics, + 'data', + l.data, + 'address', + l.contract_address + ) AS DATA +FROM + candidate_logs l + INNER JOIN {{ ref('silver__complete_event_abis') }} A + ON A.parent_contract_address = l.contract_address + AND A.event_signature = l.topics [0] :: STRING + AND l.block_number BETWEEN A.start_block + AND A.end_block +WHERE + NOT EXISTS ( + SELECT + 1 + FROM + existing_logs_to_exclude e + WHERE + e._log_id = l._log_id + ) + +{% if testing_limit is not none %} + LIMIT + {{ testing_limit }} +{% endif %} \ No newline at end of file diff --git a/models/streamline/silver/decoder/complete/streamline__complete_decode_logs.sql b/models/streamline/silver/decoder/complete/streamline__complete_decode_logs.sql deleted file mode 100644 index 4130953d..00000000 --- a/models/streamline/silver/decoder/complete/streamline__complete_decode_logs.sql +++ /dev/null @@ -1,32 +0,0 @@ --- depends_on: {{ ref('bronze__decoded_logs') }} -{{ config ( - materialized = "incremental", - unique_key = "_log_id", - cluster_by = "ROUND(block_number, -3)", - incremental_predicates = ["dynamic_range", "block_number"], - merge_update_columns = ["_log_id"], - post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(_log_id)", - tags = ['streamline_decoded_logs_complete'] -) }} - -SELECT - block_number, - id AS _log_id, - _inserted_timestamp -FROM - -{% if is_incremental() %} -{{ ref('bronze__decoded_logs') }} -WHERE - TO_TIMESTAMP_NTZ(_inserted_timestamp) >= ( - SELECT - 
COALESCE(MAX(TO_TIMESTAMP_NTZ(_inserted_timestamp)), '1970-01-01 00:00:00') _inserted_timestamp - FROM - {{ this }}) - {% else %} - {{ ref('bronze__fr_decoded_logs') }} - {% endif %} - - qualify(ROW_NUMBER() over (PARTITION BY id - ORDER BY - _inserted_timestamp DESC)) = 1 diff --git a/models/streamline/silver/decoder/realtime/streamline__decode_logs_realtime.sql b/models/streamline/silver/decoder/realtime/streamline__decode_logs_realtime.sql deleted file mode 100644 index 774583ab..00000000 --- a/models/streamline/silver/decoder/realtime/streamline__decode_logs_realtime.sql +++ /dev/null @@ -1,80 +0,0 @@ -{{ config ( - materialized = "view", - post_hook = [if_data_call_function( func = "{{this.schema}}.udf_bulk_decode_logs(object_construct('sql_source', '{{this.identifier}}','producer_batch_size', 20000000,'producer_limit_size', 20000000))", target = "{{this.schema}}.{{this.identifier}}" ),"call system$wait(" ~ var("WAIT", 400) ~ ")" ], - tags = ['streamline_decoded_logs_realtime'] -) }} - -WITH target_blocks AS ( - - SELECT - block_number - FROM - {{ ref('core__fact_blocks') }} - WHERE - block_number >= ( - SELECT - block_number - FROM - {{ ref("_block_lookback") }} - ) -), -existing_logs_to_exclude AS ( - SELECT - _log_id - FROM - {{ ref('streamline__complete_decode_logs') }} - l - INNER JOIN target_blocks b USING (block_number) - WHERE - l._inserted_timestamp :: DATE >= DATEADD('day', -5, SYSDATE()) -), -candidate_logs AS ( - SELECT - l.block_number, - l.tx_hash, - l.event_index, - l.contract_address, - l.topics, - l.data, - CONCAT( - l.tx_hash :: STRING, - '-', - l.event_index :: STRING - ) AS _log_id - FROM - target_blocks b - INNER JOIN {{ ref('core__fact_event_logs') }} - l USING (block_number) - WHERE - l.tx_succeeded - AND l.inserted_timestamp :: DATE >= DATEADD('day', -5, SYSDATE()) -) -SELECT - l.block_number, - l._log_id, - A.abi AS abi, - OBJECT_CONSTRUCT( - 'topics', - l.topics, - 'data', - l.data, - 'address', - l.contract_address - ) AS DATA -FROM - candidate_logs l - INNER JOIN {{ ref('silver__complete_event_abis') }} A - ON A.parent_contract_address = l.contract_address - AND A.event_signature = l.topics [0] :: STRING - AND l.block_number BETWEEN A.start_block - AND A.end_block -WHERE - NOT EXISTS ( - SELECT - 1 - FROM - existing_logs_to_exclude e - WHERE - e._log_id = l._log_id - ) -limit 7500000 \ No newline at end of file diff --git a/package-lock.yml b/package-lock.yml deleted file mode 100644 index ee9ea8a0..00000000 --- a/package-lock.yml +++ /dev/null @@ -1,18 +0,0 @@ -packages: -- package: calogica/dbt_expectations - version: 0.8.2 -- package: dbt-labs/dbt_external_tables - version: 0.8.2 -- package: dbt-labs/dbt_utils - version: 1.0.0 -- git: https://github.com/FlipsideCrypto/fsc-utils.git - revision: eb33ac727af26ebc8a8cc9711d4a6ebc3790a107 -- package: get-select/dbt_snowflake_query_tags - version: 2.5.0 -- git: https://github.com/FlipsideCrypto/fsc-evm.git - revision: ec6adae14ab4060ad4a553fb7f32d7e57693996d -- package: calogica/dbt_date - version: 0.7.2 -- git: https://github.com/FlipsideCrypto/livequery-models.git - revision: b024188be4e9c6bc00ed77797ebdc92d351d620e -sha1_hash: 622a679ecf98e6ebf3c904241902ce5328c77e52 diff --git a/packages.yml b/packages.yml index 4fb6364c..e90ff6f3 100644 --- a/packages.yml +++ b/packages.yml @@ -6,7 +6,7 @@ packages: - package: dbt-labs/dbt_utils version: 1.0.0 - git: https://github.com/FlipsideCrypto/fsc-utils.git - revision: v1.29.0 + revision: v1.31.0 - package: get-select/dbt_snowflake_query_tags version: [">=2.0.0", 
"<3.0.0"] - git: https://github.com/FlipsideCrypto/fsc-evm.git