Merge pull request #63 from FlipsideCrypto/add-traces-streamline-models

traces models
Austin 2023-12-20 12:51:17 -05:00 committed by GitHub
commit b549df1372
7 changed files with 221 additions and 1 deletion

@@ -0,0 +1,34 @@
name: dbt_run_streamline_traces_history
run-name: dbt_run_streamline_traces_history

on:
  workflow_dispatch:
  schedule:
    # Run every 2 hours
    - cron: "0 */2 * * *"

env:
  USE_VARS: "${{ vars.USE_VARS }}"
  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
  DBT_VERSION: "${{ vars.DBT_VERSION }}"
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

concurrency:
  group: ${{ github.workflow }}

jobs:
  called_workflow_template:
    uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
    with:
      dbt_command: >
        dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/silver/streamline/history/streamline__traces_history.sql
      environment: workflow_prod
      warehouse: ${{ vars.WAREHOUSE }}
    secrets: inherit

@@ -7,6 +7,5 @@
{% endset %}
{% do run_query(sql) %}
{{- fsc_utils.create_udfs() -}}
{% endif %}
{% endmacro %}

@@ -0,0 +1,57 @@
{{ config (
materialized = 'view'
) }}
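-- meta: one row per file registered to the traces external table,
-- with the block-id partition parsed from the file path for partition pruning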
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS _partition_by_block_id
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", "traces") }}'
)
) A
)
SELECT
block_number,
s.value :metadata :request :params [0] :: STRING AS tx_hash,
DATA,
_inserted_timestamp,
MD5(
CAST(
COALESCE(CAST(tx_hash AS text), '' :: STRING) AS text
)
) AS id,
s._partition_by_block_id,
s.value AS VALUE
FROM
{{ source(
"bronze_streamline",
"traces"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_id = s._partition_by_block_id
WHERE
b._partition_by_block_id = s._partition_by_block_id
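-- exclude responses that came back with one of the JSON-RPC error codes listed below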
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010',
'-32608'
)
)

@@ -0,0 +1,57 @@
{{ config (
materialized = 'view'
) }}
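-- meta: files registered to the traces external table within the last 3 days,
-- with the block-id partition parsed from the file path for partition pruning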
WITH meta AS (
SELECT
last_modified AS _inserted_timestamp,
file_name,
CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER) AS _partition_by_block_id
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", "traces") }}')
) A
)
SELECT
block_number,
s.value :metadata :request :params [0] :: STRING AS tx_hash,
DATA,
_inserted_timestamp,
MD5(
CAST(
COALESCE(CAST(tx_hash AS text), '' :: STRING) AS text
)
) AS id,
s._partition_by_block_id,
s.value AS VALUE
FROM
{{ source(
"bronze_streamline",
"traces"
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b._partition_by_block_id = s._partition_by_block_id
WHERE
b._partition_by_block_id = s._partition_by_block_id
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010',
'-32608'
)
)

@@ -0,0 +1,32 @@
-- depends_on: {{ ref('bronze__streamline_traces') }}
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
) }}
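-- incremental runs pull only bronze rows newer than the latest _inserted_timestamp already in this table;
-- full refreshes read bronze__streamline_FR_traces instead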
SELECT
id,
block_number,
tx_hash,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_traces') }}
WHERE
_inserted_timestamp >=
(
SELECT
ifnull(MAX(_inserted_timestamp),'1900-01-01' :: timestamp_ntz ) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_FR_traces') }}
{% endif %}
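-- keep only the most recent record per id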
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1

@@ -0,0 +1,40 @@
{{ config (
materialized = "view",
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'traces', 'producer_batch_size',10000, 'producer_limit_size',1000000, 'worker_batch_size',100))",
target = "{{this.schema}}.{{this.identifier}}"
)
) }}
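-- tbl: transactions that do not yet have an entry in streamline__complete_traces;
-- the post_hook calls udf_json_rpc when this view returns rows, requesting traces for them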
WITH tbl AS (
SELECT
block_number,
tx_hash
FROM
{{ ref("silver__transactions") }}
WHERE
block_number IS NOT NULL
AND tx_hash IS NOT NULL
EXCEPT
SELECT
block_number,
tx_hash
FROM
{{ ref("streamline__complete_traces") }}
WHERE
block_number IS NOT NULL
AND tx_hash IS NOT NULL
)
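-- one debug_traceTransaction request per missing transaction: the tx hash and the
-- callTracer options are packed into a single params string, separated by '_-_'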
SELECT
block_number,
'debug_traceTransaction' AS method,
CONCAT(
tx_hash,
'_-_',
'{"tracer": "callTracer","timeout": "30s"}'
) AS params
FROM
tbl
ORDER BY
block_number ASC

@@ -18,6 +18,7 @@ sources:
- name: blocks
- name: transactions
- name: tx_receipts
- name: traces
- name: crosschain_silver
database: crosschain