diff --git a/.github/workflows/dbt_run_streamline_history_adhoc.yml b/.github/workflows/dbt_run_streamline_history_adhoc.yml
index a1d8053..46c1e51 100644
--- a/.github/workflows/dbt_run_streamline_history_adhoc.yml
+++ b/.github/workflows/dbt_run_streamline_history_adhoc.yml
@@ -29,6 +29,7 @@ on:
        description: 'DBT Run Command'
        required: true
        options:
+          - dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m "base_models,tag:streamline_core_complete" "base_models,tag:streamline_core_history"
          - dbt run --threads 8 --vars '{"STREAMLINE_INVOKE_STREAMS":True,"WAIT":120}' -m "base_models,tag:streamline_decoded_logs_complete" "base_models,tag:streamline_decoded_logs_history"
 
env:
diff --git a/models/streamline/silver/core/history/streamline__debug_traceBlockByNumber_history.sql b/models/streamline/silver/core/history/streamline__debug_traceBlockByNumber_history.sql
new file mode 100644
index 0000000..3465fb8
--- /dev/null
+++ b/models/streamline/silver/core/history/streamline__debug_traceBlockByNumber_history.sql
@@ -0,0 +1,72 @@
+{{ config (
+    materialized = "view",
+    post_hook = if_data_call_function(
+        func = "{{this.schema}}.udf_bulk_get_traces(object_construct('sql_source', '{{this.identifier}}','exploded_key','[\"result\"]', 'method', 'debug_traceBlockByNumber', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','200000')}}, 'producer_batch_size', {{var('producer_batch_size','200000')}}, 'worker_batch_size', {{var('worker_batch_size','100000')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))",
+        target = "{{this.schema}}.{{this.identifier}}"
+    ),
+    tags = ['streamline_core_history']
+) }}
+
+WITH last_3_days AS (
+
+    SELECT
+        block_number
+    FROM
+        {{ ref("_max_block_by_date") }}
+    qualify ROW_NUMBER() over (
+        ORDER BY
+            block_number DESC
+        ) = 3
+),
+blocks AS (
+    SELECT
+        block_number
+    FROM
+        {{ ref("streamline__blocks") }}
+    WHERE
+        block_number <= (
+            SELECT
+                block_number
+            FROM
+                last_3_days
+        )
+    EXCEPT
+    SELECT
+        block_number
+    FROM
+        {{ ref("streamline__complete_debug_traceBlockByNumber") }}
+    WHERE
+        block_number <= (
+            SELECT
+                block_number
+            FROM
+                last_3_days
+        )
+)
+SELECT
+    PARSE_JSON(
+        CONCAT(
+            '{"jsonrpc": "2.0",',
+            '"method": "debug_traceBlockByNumber", "params":["',
+            REPLACE(
+                concat_ws(
+                    '',
+                    '0x',
+                    to_char(
+                        block_number :: INTEGER,
+                        'XXXXXXXX'
+                    )
+                ),
+                ' ',
+                ''
+            ),
+            '",{"tracer": "callTracer","timeout": "30s"}',
+            '],"id":"',
+            block_number :: INTEGER,
+            '"}'
+        )
+    ) AS request
+FROM
+    blocks
+ORDER BY
+    block_number ASC
diff --git a/models/streamline/silver/core/history/streamline__qn_getBlockWithReceipts_history.sql b/models/streamline/silver/core/history/streamline__qn_getBlockWithReceipts_history.sql
new file mode 100644
index 0000000..e91e080
--- /dev/null
+++ b/models/streamline/silver/core/history/streamline__qn_getBlockWithReceipts_history.sql
@@ -0,0 +1,71 @@
+{{ config (
+    materialized = "view",
+    post_hook = if_data_call_function(
+        func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','200000')}}, 'producer_batch_size', {{var('producer_batch_size','200000')}}, 'worker_batch_size', {{var('worker_batch_size','100000')}}, 'batch_call_limit', {{var('batch_call_limit','1')}}))",
+        target = "{{this.schema}}.{{this.identifier}}"
+    ),
+    tags = ['streamline_core_history']
+) }}
+
+WITH last_3_days AS (
+
+    SELECT
+        block_number
+    FROM
+        {{ ref("_max_block_by_date") }}
+    qualify ROW_NUMBER() over (
+        ORDER BY
+            block_number DESC
+        ) = 3
+),
+blocks AS (
+    SELECT
+        block_number :: STRING AS block_number
+    FROM
+        {{ ref("streamline__blocks") }}
+    WHERE
+        block_number <= (
+            SELECT
+                block_number
+            FROM
+                last_3_days
+        )
+    EXCEPT
+    SELECT
+        block_number :: STRING
+    FROM
+        {{ ref("streamline__complete_qn_getBlockWithReceipts") }}
+    WHERE
+        block_number <= (
+            SELECT
+                block_number
+            FROM
+                last_3_days
+        )
+)
+SELECT
+    PARSE_JSON(
+        CONCAT(
+            '{"jsonrpc": "2.0",',
+            '"method": "qn_getBlockWithReceipts", "params":["',
+            REPLACE(
+                concat_ws(
+                    '',
+                    '0x',
+                    to_char(
+                        block_number :: INTEGER,
+                        'XXXXXXXX'
+                    )
+                ),
+                ' ',
+                ''
+            ),
+            '"],"id":"',
+            block_number :: INTEGER,
+            '"}'
+        )
+    ) AS request
+FROM
+    blocks
+ORDER BY
+    block_number ASC