AN-3366/gno-core-streamline (#66)

* workflows, FR logic, realtime

* retry logic and tests

* receipts

* removed FR config on observability models

* observability var (OBSERV_FULL_TEST)

* workflow test env and monthly test
drethereum 2023-08-15 14:30:51 -06:00 committed by GitHub
parent bf57787bb0
commit fda0bf68fe
56 changed files with 2024 additions and 61 deletions
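
In short, this PR splits dbt testing into three tag-scoped schedules plus a monthly full observability run. A rough summary of the commands each workflow invokes (taken from the workflow diffs below; schedules and thread counts are those set in this repo's environments):

# dbt_test_daily: everything except the heavy full/recent suites
dbt test --exclude tag:full_test tag:recent_test

# dbt_test_intraday (every 4 hours): refresh observability models, then run the recent suite
dbt run -m tag:observability
dbt test -m tag:recent_test

# dbt_run_full_observability / dbt_test_monthly (monthly): full-history pass
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m tag:observability
dbt test -m tag:full_test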

View File

@ -1,12 +1,12 @@
name: dbt_run_temp_traces2_backfill
run-name: dbt_run_temp_traces2_backfill
name: dbt_run_full_observability
run-name: dbt_run_full_observability
on:
workflow_dispatch:
schedule:
# Runs "every 1 hour at min 15" (see https://crontab.guru)
- cron: '15 */1 * * *'
# Runs “At 18:00 on day-of-month 1.” (see https://crontab.guru)
- cron: '0 18 1 * *'
env:
DBT_PROFILES_DIR: ./
@ -42,4 +42,7 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m models/silver/core/silver__traces2.sql
dbt run --threads 2 --vars '{"OBSERV_FULL_TEST":True}' -m tag:observability

View File

@ -42,4 +42,4 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m models/silver/core --exclude models/silver/core/silver__traces2.sql
dbt run -m models/silver/core

View File

@ -1,5 +1,5 @@
name: dbt_test
run-name: dbt_test
name: dbt_test_daily
run-name: dbt_test_daily
on:
workflow_dispatch:
@ -21,27 +21,26 @@ env:
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
- uses: actions/setup-python@v4
with:
python-version: "3.7.x"
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test
dbt test --exclude tag:full_test tag:recent_test

.github/workflows/dbt_test_intraday.yml (new file, 46 lines)
View File

@ -0,0 +1,46 @@
name: dbt_test_intraday
run-name: dbt_test_intraday
on:
workflow_dispatch:
schedule:
# Runs “At minute 5 past every 4th hour.” (see https://crontab.guru)
- cron: '5 */4 * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run -m tag:observability
dbt test -m tag:recent_test

.github/workflows/dbt_test_monthly.yml (new file, 48 lines)
View File

@ -0,0 +1,48 @@
name: dbt_test_monthly
run-name: dbt_test_monthly
on:
workflow_dispatch:
schedule:
# Runs “At 18:00 on day-of-month 1.” (see https://crontab.guru)
- cron: '0 18 1 * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_test
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt test -m tag:full_test

View File

@ -50,4 +50,5 @@ vars:
UPDATE_UDFS_AND_SPS: False
UPDATE_SNOWFLAKE_TAGS: True
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
OBSERV_FULL_TEST: False
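
OBSERV_FULL_TEST defaults to False here. Inside each observability model it gates the incremental filter, roughly:

{% if var('OBSERV_FULL_TEST') %} OR block_number >= 0 {% endif %}

so a normal incremental run re-tests only the 96-hour lookback plus previously impacted blocks, while the monthly full-observability workflow overrides the var at invocation time to widen the test to the entire block history.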

View File

@ -0,0 +1,103 @@
{% macro missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_full') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
(
model_tx_hash IS NULL
OR model_block_number IS NULL
)
{% endmacro %}
{% macro recent_missing_txs(
model
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
tx_hash AS base_tx_hash
FROM
{{ ref('test_silver__transactions_recent') }}
),
model_name AS (
SELECT
block_number AS model_block_number,
tx_hash AS model_tx_hash
FROM
{{ model }}
)
SELECT
base_block_number,
base_tx_hash,
model_block_number,
model_tx_hash
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
WHERE
model_tx_hash IS NULL
OR model_block_number IS NULL
{% endmacro %}
{% macro missing_confirmed_txs(
model1,
model2
) %}
WITH txs_base AS (
SELECT
block_number AS base_block_number,
block_hash AS base_block_hash,
tx_hash AS base_tx_hash
FROM
{{ model1 }}
),
model_name AS (
SELECT
block_number AS model_block_number,
block_hash AS model_block_hash,
tx_hash AS model_tx_hash
FROM
{{ model2 }}
)
SELECT
DISTINCT base_block_number AS block_number
FROM
txs_base
LEFT JOIN model_name
ON base_block_number = model_block_number
AND base_tx_hash = model_tx_hash
AND base_block_hash = model_block_hash
WHERE
model_tx_hash IS NULL
AND model_block_number <= (
SELECT
MAX(base_block_number)
FROM
txs_base
)
{% endmacro %}

View File

@ -0,0 +1,167 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
tags = ['observability']
) }}
-- full_refresh = false, add back after blocks replayed
WITH summary_stats AS (
SELECT
MIN(block_number) AS min_block,
MAX(block_number) AS max_block,
MIN(block_timestamp) AS min_block_timestamp,
MAX(block_timestamp) AS max_block_timestamp,
COUNT(1) AS blocks_tested
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp <= DATEADD('hour', -12, CURRENT_TIMESTAMP())
{% if is_incremental() %}
AND (
block_number >= (
SELECT
MIN(block_number)
FROM
(
SELECT
MIN(block_number) AS block_number
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp BETWEEN DATEADD('hour', -96, CURRENT_TIMESTAMP())
AND DATEADD('hour', -95, CURRENT_TIMESTAMP())
UNION
SELECT
MIN(VALUE) - 1 AS block_number
FROM
(
SELECT
blocks_impacted_array
FROM
{{ this }}
qualify ROW_NUMBER() over (
ORDER BY
test_timestamp DESC
) = 1
),
LATERAL FLATTEN(
input => blocks_impacted_array
)
)
) {% if var('OBSERV_FULL_TEST') %}
OR block_number >= 0
{% endif %}
)
{% endif %}
),
block_range AS (
SELECT
_id AS block_number
FROM
{{ source(
'crosschain_silver',
'number_sequence'
) }}
WHERE
_id BETWEEN (
SELECT
min_block
FROM
summary_stats
)
AND (
SELECT
max_block
FROM
summary_stats
)
),
blocks AS (
SELECT
l.block_number,
block_timestamp,
LAG(
l.block_number,
1
) over (
ORDER BY
l.block_number ASC
) AS prev_BLOCK_NUMBER
FROM
{{ ref("silver__blocks2") }}
l
INNER JOIN block_range b
ON l.block_number = b.block_number
AND l.block_number >= (
SELECT
MIN(block_number)
FROM
block_range
)
),
block_gen AS (
SELECT
_id AS block_number
FROM
{{ source(
'crosschain_silver',
'number_sequence'
) }}
WHERE
_id BETWEEN (
SELECT
MIN(block_number)
FROM
blocks
)
AND (
SELECT
MAX(block_number)
FROM
blocks
)
)
SELECT
'blocks' AS test_name,
MIN(
b.block_number
) AS min_block,
MAX(
b.block_number
) AS max_block,
MIN(
b.block_timestamp
) AS min_block_timestamp,
MAX(
b.block_timestamp
) AS max_block_timestamp,
COUNT(1) AS blocks_tested,
COUNT(
CASE
WHEN C.block_number IS NOT NULL THEN A.block_number
END
) AS blocks_impacted_count,
ARRAY_AGG(
CASE
WHEN C.block_number IS NOT NULL THEN A.block_number
END
) within GROUP (
ORDER BY
A.block_number
) AS blocks_impacted_array,
CURRENT_TIMESTAMP AS test_timestamp
FROM
block_gen A
LEFT JOIN blocks b
ON A.block_number = b.block_number
LEFT JOIN blocks C
ON A.block_number > C.prev_block_number
AND A.block_number < C.block_number
AND C.block_number - C.prev_block_number <> 1
WHERE
COALESCE(
b.block_number,
C.block_number
) IS NOT NULL

View File

@ -0,0 +1,123 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
tags = ['observability']
) }}
-- full_refresh = false, add back after blocks replayed
WITH summary_stats AS (
SELECT
MIN(block_number) AS min_block,
MAX(block_number) AS max_block,
MIN(block_timestamp) AS min_block_timestamp,
MAX(block_timestamp) AS max_block_timestamp,
COUNT(1) AS blocks_tested
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp <= DATEADD('hour', -12, CURRENT_TIMESTAMP())
{% if is_incremental() %}
AND (
block_number >= (
SELECT
MIN(block_number)
FROM
(
SELECT
MIN(block_number) AS block_number
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp BETWEEN DATEADD('hour', -96, CURRENT_TIMESTAMP())
AND DATEADD('hour', -95, CURRENT_TIMESTAMP())
UNION
SELECT
MIN(VALUE) - 1 AS block_number
FROM
(
SELECT
blocks_impacted_array
FROM
{{ this }}
qualify ROW_NUMBER() over (
ORDER BY
test_timestamp DESC
) = 1
),
LATERAL FLATTEN(
input => blocks_impacted_array
)
)
) {% if var('OBSERV_FULL_TEST') %}
OR block_number >= 0
{% endif %}
)
{% endif %}
),
block_range AS (
SELECT
_id AS block_number
FROM
{{ source(
'crosschain_silver',
'number_sequence'
) }}
WHERE
_id BETWEEN (
SELECT
min_block
FROM
summary_stats
)
AND (
SELECT
max_block
FROM
summary_stats
)
),
broken_blocks AS (
SELECT
DISTINCT block_number
FROM
{{ ref("silver__receipts") }}
r
LEFT JOIN {{ ref("silver__logs2") }}
l USING (
block_number,
tx_hash
)
JOIN block_range USING (block_number)
WHERE
l.tx_hash IS NULL
AND ARRAY_SIZE(
r.logs
) > 0
),
impacted_blocks AS (
SELECT
COUNT(1) AS blocks_impacted_count,
ARRAY_AGG(block_number) within GROUP (
ORDER BY
block_number
) AS blocks_impacted_array
FROM
broken_blocks
)
SELECT
'event_logs' AS test_name,
min_block,
max_block,
min_block_timestamp,
max_block_timestamp,
blocks_tested,
blocks_impacted_count,
blocks_impacted_array,
CURRENT_TIMESTAMP() AS test_timestamp
FROM
summary_stats
JOIN impacted_blocks
ON 1 = 1

View File

@ -0,0 +1,121 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
tags = ['observability']
) }}
-- full_refresh = false, add back after blocks replayed
WITH summary_stats AS (
SELECT
MIN(block_number) AS min_block,
MAX(block_number) AS max_block,
MIN(block_timestamp) AS min_block_timestamp,
MAX(block_timestamp) AS max_block_timestamp,
COUNT(1) AS blocks_tested
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp <= DATEADD('hour', -12, CURRENT_TIMESTAMP())
{% if is_incremental() %}
AND (
block_number >= (
SELECT
MIN(block_number)
FROM
(
SELECT
MIN(block_number) AS block_number
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp BETWEEN DATEADD('hour', -96, CURRENT_TIMESTAMP())
AND DATEADD('hour', -95, CURRENT_TIMESTAMP())
UNION
SELECT
MIN(VALUE) - 1 AS block_number
FROM
(
SELECT
blocks_impacted_array
FROM
{{ this }}
qualify ROW_NUMBER() over (
ORDER BY
test_timestamp DESC
) = 1
),
LATERAL FLATTEN(
input => blocks_impacted_array
)
)
) {% if var('OBSERV_FULL_TEST') %}
OR block_number >= 0
{% endif %}
)
{% endif %}
),
block_range AS (
SELECT
_id AS block_number
FROM
{{ source(
'crosschain_silver',
'number_sequence'
) }}
WHERE
_id BETWEEN (
SELECT
min_block
FROM
summary_stats
)
AND (
SELECT
max_block
FROM
summary_stats
)
),
broken_blocks AS (
SELECT
DISTINCT block_number
FROM
{{ ref("silver__transactions2") }}
t
LEFT JOIN {{ ref("silver__receipts") }}
r USING (
block_number,
tx_hash,
block_hash
)
JOIN block_range USING (block_number)
WHERE
r.tx_hash IS NULL
),
impacted_blocks AS (
SELECT
COUNT(1) AS blocks_impacted_count,
ARRAY_AGG(block_number) within GROUP (
ORDER BY
block_number
) AS blocks_impacted_array
FROM
broken_blocks
)
SELECT
'receipts' AS test_name,
min_block,
max_block,
min_block_timestamp,
max_block_timestamp,
blocks_tested,
blocks_impacted_count,
blocks_impacted_array,
CURRENT_TIMESTAMP() AS test_timestamp
FROM
summary_stats
JOIN impacted_blocks
ON 1 = 1

View File

@ -0,0 +1,120 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
tags = ['observability']
) }}
-- full_refresh = false, add back after blocks replayed
WITH summary_stats AS (
SELECT
MIN(block_number) AS min_block,
MAX(block_number) AS max_block,
MIN(block_timestamp) AS min_block_timestamp,
MAX(block_timestamp) AS max_block_timestamp,
COUNT(1) AS blocks_tested
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp <= DATEADD('hour', -12, CURRENT_TIMESTAMP())
{% if is_incremental() %}
AND (
block_number >= (
SELECT
MIN(block_number)
FROM
(
SELECT
MIN(block_number) AS block_number
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp BETWEEN DATEADD('hour', -96, CURRENT_TIMESTAMP())
AND DATEADD('hour', -95, CURRENT_TIMESTAMP())
UNION
SELECT
MIN(VALUE) - 1 AS block_number
FROM
(
SELECT
blocks_impacted_array
FROM
{{ this }}
qualify ROW_NUMBER() over (
ORDER BY
test_timestamp DESC
) = 1
),
LATERAL FLATTEN(
input => blocks_impacted_array
)
)
) {% if var('OBSERV_FULL_TEST') %}
OR block_number >= 0
{% endif %}
)
{% endif %}
),
block_range AS (
SELECT
_id AS block_number
FROM
{{ source(
'crosschain_silver',
'number_sequence'
) }}
WHERE
_id BETWEEN (
SELECT
min_block
FROM
summary_stats
)
AND (
SELECT
max_block
FROM
summary_stats
)
),
broken_blocks AS (
SELECT
DISTINCT block_number
FROM
{{ ref("silver__transactions2") }}
tx
LEFT JOIN {{ ref("silver__traces2") }}
tr USING (
block_number,
tx_hash
)
JOIN block_range USING (block_number)
WHERE
tr.tx_hash IS NULL
),
impacted_blocks AS (
SELECT
COUNT(1) AS blocks_impacted_count,
ARRAY_AGG(block_number) within GROUP (
ORDER BY
block_number
) AS blocks_impacted_array
FROM
broken_blocks
)
SELECT
'traces' AS test_name,
min_block,
max_block,
min_block_timestamp,
max_block_timestamp,
blocks_tested,
blocks_impacted_count,
blocks_impacted_array,
CURRENT_TIMESTAMP() AS test_timestamp
FROM
summary_stats
JOIN impacted_blocks
ON 1 = 1

View File

@ -0,0 +1,121 @@
{{ config(
materialized = 'incremental',
unique_key = 'test_timestamp',
tags = ['observability']
) }}
-- full_refresh = false, add back after blocks replayed
WITH summary_stats AS (
SELECT
MIN(block_number) AS min_block,
MAX(block_number) AS max_block,
MIN(block_timestamp) AS min_block_timestamp,
MAX(block_timestamp) AS max_block_timestamp,
COUNT(1) AS blocks_tested
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp <= DATEADD('hour', -12, CURRENT_TIMESTAMP())
{% if is_incremental() %}
AND (
block_number >= (
SELECT
MIN(block_number)
FROM
(
SELECT
MIN(block_number) AS block_number
FROM
{{ ref('silver__blocks2') }}
WHERE
block_timestamp BETWEEN DATEADD('hour', -96, CURRENT_TIMESTAMP())
AND DATEADD('hour', -95, CURRENT_TIMESTAMP())
UNION
SELECT
MIN(VALUE) - 1 AS block_number
FROM
(
SELECT
blocks_impacted_array
FROM
{{ this }}
qualify ROW_NUMBER() over (
ORDER BY
test_timestamp DESC
) = 1
),
LATERAL FLATTEN(
input => blocks_impacted_array
)
)
) {% if var('OBSERV_FULL_TEST') %}
OR block_number >= 0
{% endif %}
)
{% endif %}
),
block_range AS (
SELECT
_id AS block_number
FROM
{{ source(
'crosschain_silver',
'number_sequence'
) }}
WHERE
_id BETWEEN (
SELECT
min_block
FROM
summary_stats
)
AND (
SELECT
max_block
FROM
summary_stats
)
),
broken_blocks AS (
SELECT
DISTINCT block_number
FROM
{{ ref("silver__confirmed_blocks") }}
b
LEFT JOIN {{ ref("silver__transactions2") }}
t USING (
block_number,
tx_hash,
block_hash
)
JOIN block_range USING (block_number)
WHERE
t.tx_hash IS NULL
),
impacted_blocks AS (
SELECT
COUNT(1) AS blocks_impacted_count,
ARRAY_AGG(block_number) within GROUP (
ORDER BY
block_number
) AS blocks_impacted_array
FROM
broken_blocks
)
SELECT
'transactions' AS test_name,
min_block,
max_block,
min_block_timestamp,
max_block_timestamp,
blocks_tested,
blocks_impacted_count,
blocks_impacted_array,
CURRENT_TIMESTAMP() AS test_timestamp
FROM
summary_stats
JOIN impacted_blocks
ON 1 = 1

View File

@ -3,7 +3,8 @@
materialized = 'incremental',
unique_key = "block_number",
cluster_by = "block_timestamp::date",
tags = ['non_realtime']
tags = ['non_realtime'],
full_refresh = false
) }}
SELECT

View File

@ -4,9 +4,9 @@
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = "round(block_number,-3)",
tags = ['non_realtime']
tags = ['non_realtime'],
full_refresh = false
) }}
--full_refresh = false --add after backfill
WITH base AS (

View File

@ -4,9 +4,9 @@
unique_key = "block_number",
cluster_by = "block_timestamp::date, _inserted_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
tags = ['non_realtime']
tags = ['non_realtime'],
full_refresh = false
) }}
-- full_refresh = false, add back after backfill
WITH base AS (

View File

@ -5,9 +5,9 @@
unique_key = "block_number",
cluster_by = "ROUND(block_number, -3)",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(tx_hash)",
tags = ['non_realtime']
tags = ['non_realtime'],
full_refresh = false
) }}
-- full_refresh = false, add back after backfill
WITH base AS (

View File

@ -5,11 +5,10 @@
unique_key = "block_number",
cluster_by = "block_timestamp::date, _inserted_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
tags = ['non_realtime']
tags = ['non_realtime'],
full_refresh = false
) }}
-- full_refresh = false, add back after backfill
WITH traces_txs AS (
SELECT
@ -20,27 +19,23 @@ WITH traces_txs AS (
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_FR_traces') }}
{{ ref('bronze__streamline_traces') }}
WHERE
_partition_by_block_id BETWEEN (
_inserted_timestamp >= (
SELECT
ROUND(MAX(block_number), -4)
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }})
AND (
SELECT
ROUND(MAX(block_number), -4) + 2000000
FROM
{{ this }})
{% else %}
{{ ref('bronze__streamline_FR_traces') }}
WHERE
_partition_by_block_id <= 2500000
{% endif %}
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_FR_traces') }}
WHERE
_partition_by_block_id <= 5000000
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY block_number, tx_position
ORDER BY
_inserted_timestamp DESC)) = 1
qualify(ROW_NUMBER() over (PARTITION BY block_number, tx_position
ORDER BY
_inserted_timestamp DESC)) = 1
),
base_table AS (
SELECT
@ -259,15 +254,14 @@ flattened_traces AS (
ON f.tx_position = t.position
AND f.block_number = t.block_number
--add back after backfill
{# {% if is_incremental() %}
{% if is_incremental() %}
AND t._INSERTED_TIMESTAMP >= (
SELECT
MAX(_inserted_timestamp) :: DATE - 1
FROM
{{ this }}
)
{% endif %} #}
{% endif %}
)
{% if is_incremental() %},

View File

@ -5,9 +5,10 @@
unique_key = "block_number",
cluster_by = "block_timestamp::date, _inserted_timestamp::date",
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION",
tags = ['non_realtime']
tags = ['non_realtime'],
full_refresh = false
) }}
--full-refresh = false --add after backfill
WITH base AS (
SELECT

View File

@ -0,0 +1,9 @@
{{ config (
materialized = 'view',
tags = ['full_test']
) }}
SELECT
*
FROM
{{ ref('silver__blocks2') }}

View File

@ -0,0 +1,94 @@
version: 2
models:
- name: test_silver__blocks_full
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- BLOCK_NUMBER
- fsc_utils.sequence_gaps:
column_name: BLOCK_NUMBER
where: BLOCK_TIMESTAMP < CURRENT_DATE - 1
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: DIFFICULTY
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TOTAL_DIFFICULTY
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: EXTRA_DATA
tests:
- not_null
- name: GAS_LIMIT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: GAS_USED
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PARENT_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: MINER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: NONCE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: RECEIPTS_ROOT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SHA3_UNCLES
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SIZE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER

View File

@ -0,0 +1,27 @@
{{ config (
materialized = 'view',
tags = ['recent_test']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
)
SELECT
*
FROM
{{ ref('silver__blocks2') }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)

View File

@ -0,0 +1,26 @@
version: 2
models:
- name: test_silver__blocks_recent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- BLOCK_NUMBER
- fsc_utils.sequence_gaps:
column_name: BLOCK_NUMBER
config:
severity: error
error_if: ">10"
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: hour
interval: 3
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ

View File

@ -0,0 +1,9 @@
{{ config (
materialized = 'view',
tags = ['full_test']
) }}
SELECT
*
FROM
{{ ref('silver__confirmed_blocks') }}

View File

@ -0,0 +1,34 @@
version: 2
models:
- name: test_silver__confirmed_blocks_full
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_HASH
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: BLOCK_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -0,0 +1,27 @@
{{ config (
materialized = 'view',
tags = ['recent_test']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
)
SELECT
*
FROM
{{ ref('silver__confirmed_blocks') }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)

View File

@ -0,0 +1,34 @@
version: 2
models:
- name: test_silver__confirmed_blocks_recent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_HASH
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: BLOCK_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: hour
interval: 3
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -0,0 +1,9 @@
{{ config (
materialized = 'view',
tags = ['full_test']
) }}
SELECT
*
FROM
{{ ref('silver__logs2') }}

View File

@ -0,0 +1,76 @@
version: 2
models:
- name: test_silver__logs_full
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
- fsc_utils.sequence_gaps:
partition_by:
- BLOCK_NUMBER
- TX_HASH
column_name: EVENT_INDEX
where: BLOCK_TIMESTAMP < CURRENT_DATE - 1
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: BLOCK_TIMESTAMP
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- fsc_utils.tx_block_count:
config:
severity: error
error_if: "!=0"
- name: EVENT_INDEX
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOPICS
tests:
- not_null
- name: DATA
tests:
- not_null
- name: EVENT_REMOVED
tests:
- not_null
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null:
where: NOT IS_PENDING
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -0,0 +1,27 @@
{{ config (
materialized = 'view',
tags = ['recent_test']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
)
SELECT
*
FROM
{{ ref('silver__logs2') }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)

View File

@ -0,0 +1,32 @@
version: 2
models:
- name: test_silver__logs_recent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
- fsc_utils.sequence_gaps:
partition_by:
- BLOCK_NUMBER
- TX_HASH
column_name: EVENT_INDEX
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: BLOCK_TIMESTAMP
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: hour
interval: 3
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null

View File

@ -0,0 +1,9 @@
{{ config (
materialized = 'view',
tags = ['full_test']
) }}
SELECT
*
FROM
{{ ref('silver__receipts') }}

View File

@ -0,0 +1,82 @@
version: 2
models:
- name: test_silver__receipts_full
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_HASH
- fsc_utils.sequence_gaps:
partition_by:
- BLOCK_NUMBER
column_name: POSITION
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: POSITION
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
where: TO_ADDRESS IS NOT NULL
- name: BLOCK_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CUMULATIVE_GAS_USED
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: EFFECTIVE_GAS_PRICE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: GAS_USED
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TX_STATUS
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_set:
value_set: ['SUCCESS', 'FAIL']
- name: TYPE
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1

View File

@ -0,0 +1,27 @@
{{ config (
materialized = 'view',
tags = ['recent_test']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
)
SELECT
*
FROM
{{ ref('silver__receipts') }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)

View File

@ -0,0 +1,28 @@
version: 2
models:
- name: test_silver__receipts_recent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_HASH
- fsc_utils.sequence_gaps:
partition_by:
- BLOCK_NUMBER
column_name: POSITION
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: hour
interval: 3

View File

@ -0,0 +1,9 @@
{{ config (
materialized = 'view',
tags = ['full_test']
) }}
SELECT
*
FROM
{{ ref('silver__traces2') }}

View File

@ -0,0 +1,56 @@
version: 2
models:
- name: test_silver__traces_full
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _CALL_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: FROM_ADDRESS
tests:
- not_null:
where: TYPE <> 'SELFDESTRUCT'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
where: TO_ADDRESS IS NOT NULL
- name: IDENTIFIER
tests:
- not_null
- name: AVAX_VALUE
tests:
- not_null
- name: GAS
tests:
- not_null
- name: GAS_USED
tests:
- not_null

View File

@ -0,0 +1,27 @@
{{ config (
materialized = 'view',
tags = ['recent_test']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
)
SELECT
*
FROM
{{ ref('silver__traces2') }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)

View File

@ -0,0 +1,32 @@
version: 2
models:
- name: test_silver__traces_recent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _CALL_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -0,0 +1,9 @@
{{ config (
materialized = 'view',
tags = ['full_test']
) }}
SELECT
*
FROM
{{ ref('silver__transactions2') }}

View File

@ -0,0 +1,118 @@
version: 2
models:
- name: test_silver__transactions_full
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_HASH
- fsc_utils.sequence_gaps:
partition_by:
- BLOCK_NUMBER
column_name: POSITION
where: BLOCK_TIMESTAMP < CURRENT_DATE - 1
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: NONCE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: POSITION
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- name: FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
where: TO_ADDRESS IS NOT NULL
- name: VALUE
tests:
- not_null
- name: BLOCK_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: GAS_PRICE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: GAS
tests:
- not_null
- name: INPUT_DATA
tests:
- not_null
- name: TX_STATUS
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_column_values_to_be_in_set:
value_set: ['SUCCESS', 'FAIL']
where: NOT IS_PENDING
- name: GAS_USED
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: CUMULATIVE_GAS_USED
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TX_FEE
tests:
- not_null:
where: NOT IS_PENDING
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: EFFECTIVE_GAS_PRICE
tests:
- not_null:
where: NOT IS_PENDING
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null

View File

@ -0,0 +1,27 @@
{{ config (
materialized = 'view',
tags = ['recent_test']
) }}
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
)
SELECT
*
FROM
{{ ref('silver__transactions2') }}
WHERE
block_number >= (
SELECT
block_number
FROM
last_3_days
)

View File

@ -0,0 +1,22 @@
version: 2
models:
- name: test_silver__transactions_recent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- TX_HASH
- fsc_utils.sequence_gaps:
partition_by:
- BLOCK_NUMBER
column_name: POSITION
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: hour
interval: 3

View File

@ -3,19 +3,47 @@
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'confirm_blocks', 'sql_limit', {{var('sql_limit','50000')}}, 'producer_batch_size', {{var('producer_batch_size','25000')}}, 'worker_batch_size', {{var('worker_batch_size','12500')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}, 'call_type', 'batch'))",
target = "{{this.schema}}.{{this.identifier}}"
)
),
tags = ['streamline_core_realtime']
) }}
with tbl AS (
WITH look_back AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_hour") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 6
),
tbl AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
block_number IS NOT NULL
AND block_number <= (
SELECT
block_number
FROM
look_back
)
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_confirmed_blocks") }}
WHERE
block_number IS NOT NULL
AND block_number <= (
SELECT
block_number
FROM
look_back
)
)
SELECT
PARSE_JSON(

View File

@ -3,20 +3,70 @@
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'debug_traceBlockByNumber', 'sql_limit', {{var('sql_limit','60000')}}, 'producer_batch_size', {{var('producer_batch_size','15000')}}, 'worker_batch_size', {{var('worker_batch_size','15000')}}, 'call_type', 'rest', 'exploded_key','[\"result\"]'))",
target = "{{this.schema}}.{{this.identifier}}"
)
),
tags = ['streamline_core_realtime']
) }}
WITH blocks AS (
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
),
blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_debug_traceBlockByNumber") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
),
all_blocks AS (
SELECT
block_number
FROM
blocks
UNION
SELECT
block_number
FROM
(
SELECT
block_number
FROM
{{ ref("_missing_traces") }}
UNION
SELECT
block_number
FROM
{{ ref("_unconfirmed_blocks") }}
)
)
SELECT
PARSE_JSON(
@ -35,13 +85,13 @@ SELECT
' ',
''
),
'",{"tracer": "callTracer", "timeout": "30s"}',
'",{"tracer": "callTracer","timeout": "30s"}',
'],"id":"',
block_number :: INTEGER,
'"}'
)
) AS request
FROM
blocks
all_blocks
ORDER BY
block_number ASC
block_number ASC

View File

@ -3,20 +3,75 @@
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'qn_getBlockWithReceipts', 'sql_limit', {{var('sql_limit','50000')}}, 'producer_batch_size', {{var('producer_batch_size','25000')}}, 'worker_batch_size', {{var('worker_batch_size','12500')}}, 'batch_call_limit', {{var('batch_call_limit','10')}}, 'call_type', 'batch'))",
target = "{{this.schema}}.{{this.identifier}}"
)
),
tags = ['streamline_core_realtime']
) }}
WITH blocks AS (
WITH last_3_days AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 3
),
blocks AS (
SELECT
block_number :: STRING AS block_number
FROM
{{ ref("streamline__blocks") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
EXCEPT
SELECT
block_number :: STRING
FROM
{{ ref("streamline__complete_qn_getBlockWithReceipts") }}
WHERE
(
block_number >= (
SELECT
block_number
FROM
last_3_days
)
)
),
all_blocks AS (
SELECT
block_number
FROM
blocks
UNION
SELECT
block_number
FROM
(
SELECT
block_number
FROM
{{ ref("_missing_receipts") }}
UNION
SELECT
block_number
FROM
{{ ref("_missing_txs") }}
UNION
SELECT
block_number
FROM
{{ ref("_unconfirmed_blocks") }}
)
)
SELECT
PARSE_JSON(
@ -41,6 +96,6 @@ SELECT
)
) AS request
FROM
blocks
all_blocks
ORDER BY
block_number ASC

View File

@ -0,0 +1,32 @@
{{ config (
materialized = "ephemeral"
) }}
WITH lookback AS (
SELECT
MAX(block_number) AS block_number
FROM
{{ ref("silver__blocks2") }}
WHERE
block_timestamp :: DATE = CURRENT_DATE() - 3
)
SELECT
DISTINCT t.block_number AS block_number
FROM
{{ ref("silver__transactions2") }}
t
LEFT JOIN {{ ref("silver__receipts") }}
r USING (
block_number,
block_hash,
tx_hash
)
WHERE
r.tx_hash IS NULL
AND t.block_number >= (
SELECT
block_number
FROM
lookback
)

View File

@ -0,0 +1,25 @@
{{ config (
materialized = "ephemeral"
) }}
SELECT
DISTINCT tx.block_number block_number
FROM
{{ ref("silver__transactions2") }}
tx
LEFT JOIN {{ ref("silver__traces2") }}
tr
ON tx.block_number = tr.block_number
AND tx.tx_hash = tr.tx_hash
AND tr.block_timestamp >= DATEADD(
'day',
-2,
CURRENT_DATE
)
WHERE
tx.block_timestamp >= DATEADD(
'day',
-2,
CURRENT_DATE
)
AND tr.tx_hash IS NULL

View File

@ -0,0 +1,32 @@
{{ config (
materialized = "ephemeral"
) }}
WITH transactions AS (
SELECT
block_number,
POSITION,
LAG(
POSITION,
1
) over (
PARTITION BY block_number
ORDER BY
POSITION ASC
) AS prev_POSITION
FROM
{{ ref("silver__transactions2") }}
WHERE
block_timestamp >= DATEADD(
'day',
-2,
CURRENT_DATE
)
)
SELECT
DISTINCT block_number AS block_number
FROM
transactions
WHERE
POSITION - prev_POSITION <> 1

View File

@ -0,0 +1,32 @@
{{ config (
materialized = "ephemeral"
) }}
WITH lookback AS (
SELECT
MAX(block_number) AS block_number
FROM
{{ ref("silver__blocks2") }}
WHERE
block_timestamp :: DATE = CURRENT_DATE() - 3
)
SELECT
DISTINCT cb.block_number AS block_number
FROM
{{ ref("silver__confirmed_blocks") }}
cb
LEFT JOIN {{ ref("silver__transactions2") }}
txs USING (
block_number,
block_hash,
tx_hash
)
WHERE
txs.tx_hash IS NULL
AND cb.block_number >= (
SELECT
block_number
FROM
lookback
)

View File

@ -30,6 +30,7 @@ sources:
- name: apis_keys
- name: token_prices_priority_hourly
- name: asset_metadata_priority
- name: number_sequence
- name: crosschain_public
database: crosschain
schema: bronze_public

View File

@ -0,0 +1 @@
{{ missing_confirmed_txs(ref("test_silver__confirmed_blocks_full"), ref("test_silver__transactions_full")) }}

View File

@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__transactions_full') }}
{{ missing_txs(ref("test_silver__receipts_full")) }}

View File

@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__transactions_full') }}
{{ missing_txs(ref("test_silver__traces_full")) }}

View File

@ -0,0 +1 @@
{{ missing_confirmed_txs(ref("test_silver__confirmed_blocks_recent"), ref("test_silver__transactions_recent")) }}

View File

@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__transactions_recent') }}
{{ recent_missing_txs(ref("test_silver__receipts_recent")) }}

View File

@ -0,0 +1,2 @@
-- depends_on: {{ ref('test_silver__transactions_recent') }}
{{ recent_missing_txs(ref("test_silver__traces_recent")) }}