changes to get ready for prod

Eric Laurello 2024-04-03 21:58:23 -04:00
parent aafba77656
commit 36dc7441f6
23 changed files with 189 additions and 317 deletions

View File

@@ -43,5 +43,5 @@ jobs:
           dbt deps
       - name: Run DBT Jobs
         run: |
-          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/streamline__blocks_realtime.sql || true
+          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/realtime/streamline__blocks_realtime.sql 1+models/streamline/realtime/streamline__tx_counts_realtime.sql || true
           workstream report --exit-nonzero

View File

@@ -43,5 +43,5 @@ jobs:
           dbt deps
       - name: Run DBT Jobs
         run: |
-          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/streamline__txs_realtime.sql || true
+          dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/realtime/streamline__transactions_realtime.sql || true
           workstream report --exit-nonzero

View File

@@ -1,49 +0,0 @@
-name: dbt_run_streamline_validators
-run-name: dbt_run_streamline_validators
-on:
-  push:
-    branches:
-      # - main
-      - turn-off-dev-turn-on-prod
-  schedule:
-    # Runs "every 6 hours" (see https://crontab.guru)
-    - cron: '0 0,12,23 * * *'
-env:
-  USE_VARS: "${{ vars.USE_VARS }}"
-  DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
-  DBT_VERSION: "${{ vars.DBT_VERSION }}"
-  ACCOUNT: "${{ vars.ACCOUNT }}"
-  ROLE: "${{ vars.ROLE }}"
-  USER: "${{ vars.USER }}"
-  PASSWORD: "${{ secrets.PASSWORD }}"
-  REGION: "${{ vars.REGION }}"
-  DATABASE: "${{ vars.DATABASE }}"
-  WAREHOUSE: "${{ vars.WAREHOUSE }}"
-  SCHEMA: "${{ vars.SCHEMA }}"
-concurrency:
-  group: ${{ github.workflow }}
-jobs:
-  run_dbt_jobs:
-    runs-on: ubuntu-latest
-    environment:
-      name: workflow_prod
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "${{ vars.PYTHON_VERSION }}"
-          cache: "pip"
-      - name: install dependencies
-        run: |
-          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
-          dbt deps
-      - name: Run DBT Jobs
-        run: |
-          dbt run --full-refresh -x -m 1+models/streamline/streamline__validators_realtime.sql

View File

@@ -36,16 +36,6 @@
 {%- endif %};
 {% endmacro %}
-{% macro create_udf_bulk_json_rpc() %}
-    CREATE
-    OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
-        json variant
-    ) returns text api_integration = {% if target.name == "prod" %}
-        aws_axelar_api AS ''
-    {% else %}
-        aws_axelar_api_dev AS 'https://q8knm7tyk5.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_json_rpc'
-    {%- endif %};
-{% endmacro %}
 {% macro create_udf_rest_api() %}
     CREATE

View File

@@ -10,7 +10,7 @@
 SELECT
     VALUE,
     _partition_by_block_id,
-    block_number AS block_id,
+    DATA :result :block :header :height :: INT AS block_id,
     metadata,
     DATA,
     TO_TIMESTAMP(
@@ -36,6 +36,6 @@ WHERE
     )
 {% endif %}
-qualify(ROW_NUMBER() over (PARTITION BY block_number
+qualify(ROW_NUMBER() over (PARTITION BY block_id
 ORDER BY
     _inserted_timestamp DESC)) = 1
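Note: the change above derives block_id from the JSON payload and then dedupes on it. A minimal sketch of the pattern, with raw_blocks standing in as a hypothetical source shaped like the bronze data:

-- keeps only the newest row per block_id (raw_blocks is a hypothetical stand-in)
SELECT
    DATA :result :block :header :height :: INT AS block_id,
    _inserted_timestamp
FROM
    raw_blocks
qualify(ROW_NUMBER() over (PARTITION BY block_id
ORDER BY
    _inserted_timestamp DESC)) = 1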

View File

@@ -10,9 +10,12 @@
 SELECT
     VALUE,
     _partition_by_block_id,
-    block_number AS block_id,
+    DATA :height :: INT AS block_id,
     REPLACE(
-        metadata :request :params [0],
+        COALESCE(
+            metadata :request :data :params [0],
+            metadata :request :params [0]
+        ),
         'tx.height='
     ) :: INT AS block_id_requested,
     metadata,
@@ -21,7 +24,7 @@ SELECT
     DATA :tx_result AS tx_result,
     file_name,
     TO_TIMESTAMP(
-        m._inserted_timestamp
+        _inserted_timestamp
     ) AS _inserted_timestamp
 FROM
@@ -31,8 +34,6 @@ FROM
     {{ ref('bronze__streamline_FR_transactions') }}
 {% endif %}
-m
 {% if is_incremental() %}
 WHERE
     _inserted_timestamp >= (
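Note: the COALESCE above accepts both request shapes (params nested under request.data or directly under request) before stripping the 'tx.height=' prefix. A sketch with a made-up payload:

-- made-up payloads; COALESCE takes the first path that resolves
SELECT
    REPLACE(
        COALESCE(
            PARSE_JSON('{"request": {"data": {"params": ["tx.height=11449803"]}}}') :request :data :params [0],
            PARSE_JSON('{"request": {"params": ["tx.height=11449803"]}}') :request :params [0]
        ),
        'tx.height='
    ) :: INT AS block_id_requested -- 11449803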

View File

@@ -1,9 +1,27 @@
 {{ config (
     materialized = 'view'
 ) }}
-{{ streamline_external_table_FR_query(
-    "blocks",
-    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
-    partition_name = "_partition_by_block_id",
-    unique_key = "block_number"
-) }}
+SELECT
+    partition_key,
+    DATA,
+    _INSERTED_TIMESTAMP,
+    id,
+    metadata,
+    file_name,
+    _PARTITION_BY_BLOCK_ID,
+    VALUE
+FROM
+    {{ ref('bronze__streamline_FR_transactions_v2') }}
+UNION ALL
+SELECT
+    block_number,
+    DATA,
+    _INSERTED_TIMESTAMP,
+    id,
+    metadata,
+    file_name,
+    _PARTITION_BY_BLOCK_ID,
+    VALUE
+FROM
+    {{ ref('bronze__streamline_FR_transactions_v1') }}
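Note: UNION ALL matches columns by position and takes names from the first branch, so the v1 branch's block_number surfaces through the v2 partition_key slot; the two views must keep identical column order. A toy check, assuming nothing about the real schemas:

SELECT 11449000 AS partition_key, 'v2' AS src
UNION ALL
SELECT 11449803, 'v1' -- still returned under the partition_key column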

View File

@@ -2,8 +2,8 @@
     materialized = 'view'
 ) }}
 {{ streamline_external_table_FR_query(
-    model = "blocks",
-    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
+    "blocks",
+    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
     partition_name = "_partition_by_block_id",
     unique_key = "block_number"
 ) }}

View File

@@ -2,8 +2,8 @@
     materialized = 'view'
 ) }}
 {{ streamline_external_table_FR_query(
-    model = "txs_details",
-    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
+    "blocks_v2",
+    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
     partition_name = "_partition_by_block_id",
-    unique_key = "block_number"
+    unique_key = "partition_key"
 ) }}

View File

@@ -2,8 +2,8 @@
     materialized = 'view'
 ) }}
 {{ streamline_external_table_FR_query(
-    "txs",
+    "txs_v2",
     partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
     partition_name = "_partition_by_block_id",
-    unique_key = "block_number"
+    unique_key = "partition_key"
 ) }}

View File

@@ -2,8 +2,8 @@
     materialized = 'view'
 ) }}
 {{ streamline_external_table_FR_query(
-    model = "validators",
-    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
+    "txs_details",
+    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
     partition_name = "_partition_by_block_id",
     unique_key = "block_number"
 ) }}

View File

@@ -0,0 +1,9 @@
+{{ config (
+    materialized = 'view'
+) }}
+{{ streamline_external_table_FR_query(
+    "txs_v2",
+    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER )",
+    partition_name = "_partition_by_block_id",
+    unique_key = "partition_key"
+) }}
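Note: the partition_function derives the partition value from the staged file path. A worked example, assuming a path layout of stream/table/block_suffix (the layout is an assumption for illustration):

SELECT
    CAST(SPLIT_PART(SPLIT_PART('streamline/txs_v2/11449000_0.json.gz', '/', 3), '_', 1) AS INTEGER) AS _partition_by_block_id -- 11449000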

View File

@@ -1,23 +1,34 @@
 -- depends_on: {{ ref('bronze__streamline_blocks') }}
 {{ config (
     materialized = "incremental",
-    unique_key = "id",
+    unique_key = 'block_number',
     cluster_by = "ROUND(block_number, -3)",
-    merge_update_columns = ["id"],
-    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
+    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)"
 ) }}
 SELECT
-    id,
-    block_number,
-    _inserted_timestamp
+    DATA :result :block :header :height :: INT AS block_number,
+    {{ dbt_utils.generate_surrogate_key(
+        ['block_number']
+    ) }} AS complete_blocks_id,
+    SYSDATE() AS inserted_timestamp,
+    SYSDATE() AS modified_timestamp,
+    _inserted_timestamp,
+    '{{ invocation_id }}' AS _invocation_id
 FROM
-    {{ ref('bronze__streamline_blocks') }}
 {% if is_incremental() %}
+    {{ ref('bronze__streamline_blocks') }}
 WHERE
     _inserted_timestamp >= (
         SELECT
             COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
         FROM
-            {{ this }}) qualify(ROW_NUMBER() over (PARTITION BY id
+            {{ this }})
+{% else %}
+    {{ ref('bronze__streamline_FR_blocks') }}
+{% endif %}
+qualify(ROW_NUMBER() over (PARTITION BY block_number
 ORDER BY
     _inserted_timestamp DESC)) = 1
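Note: dbt_utils.generate_surrogate_key(['block_number']) compiles to roughly the following Snowflake SQL; the null sentinel shown is the dbt-utils default (a sketch, not the exact compiled output):

SELECT
    MD5(CAST(COALESCE(CAST(11449803 AS VARCHAR), '_dbt_utils_surrogate_key_null_') AS VARCHAR)) AS complete_blocks_id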

View File

@@ -1,17 +1,24 @@
 -- depends_on: {{ ref('bronze__streamline_transactions') }}
 {{ config (
     materialized = "incremental",
-    unique_key = "id",
+    unique_key = 'complete_transactions_id',
     cluster_by = "ROUND(block_number, -3)",
-    merge_update_columns = ["id"],
-    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
+    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)"
 ) }}
 SELECT
-    id,
-    block_number,
-    metadata :request [2] AS page,
-    _inserted_timestamp
+    DATA :height :: INT AS block_number,
+    COALESCE(
+        metadata :request :data :params [2],
+        metadata :request :params [2]
+    ) :: INT AS page,
+    {{ dbt_utils.generate_surrogate_key(
+        ['block_number','page']
+    ) }} AS complete_transactions_id,
+    SYSDATE() AS inserted_timestamp,
+    SYSDATE() AS modified_timestamp,
+    _inserted_timestamp,
+    '{{ invocation_id }}' AS _invocation_id
 FROM
     {% if is_incremental() %}
@@ -22,11 +29,10 @@ WHERE
             COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
         FROM
             {{ this }})
-    )
-{% else %}
-    {{ ref('bronze__streamline_FR_transactions') }}
-{% endif %}
-qualify(ROW_NUMBER() over (PARTITION BY id
+{% else %}
+    {{ ref('bronze__streamline_FR_transactions') }}
+{% endif %}
+qualify(ROW_NUMBER() over (PARTITION BY complete_transactions_id
 ORDER BY
     _inserted_timestamp DESC)) = 1
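Note: the page number sits at index [2] of the tx_search params array built by the realtime model later in this commit. A sketch with a made-up request:

SELECT
    PARSE_JSON('["tx.height=11449803", true, "3", "100", "asc", false]') [2] :: INT AS page -- 3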

View File

@@ -1,32 +1,35 @@
 -- depends_on: {{ ref('bronze__streamline_transactions') }}
 {{ config (
     materialized = "incremental",
-    unique_key = "id",
+    unique_key = "block_number",
     cluster_by = "ROUND(block_number, -3)",
-    merge_update_columns = ["id"],
-    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
+    post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(block_number)"
 ) }}
 SELECT
-    id,
-    block_number,
-    DATA :: INTEGER AS tx_count,
-    _inserted_timestamp
+    DATA :height :: INT AS block_number,
+    DATA :: INT AS tx_count,
+    {{ dbt_utils.generate_surrogate_key(
+        ['block_number']
+    ) }} AS complete_tx_counts_id,
+    SYSDATE() AS inserted_timestamp,
+    SYSDATE() AS modified_timestamp,
+    _inserted_timestamp,
+    '{{ invocation_id }}' AS _invocation_id
 FROM
-    {{ ref('bronze__streamline_tx_counts') }}
 {% if is_incremental() %}
+    {{ ref('bronze__streamline_tx_counts') }}
 WHERE
     _inserted_timestamp >= (
         SELECT
             COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
         FROM
             {{ this }})
-    )
-{% else %}
-    {{ ref('bronze__streamline_FR_tx_counts') }}
-{% endif %}
-qualify(ROW_NUMBER() over (PARTITION BY id
+{% else %}
+    {{ ref('bronze__streamline_FR_tx_counts') }}
+{% endif %}
+qualify(ROW_NUMBER() over (PARTITION BY block_number
 ORDER BY
     _inserted_timestamp DESC)) = 1

View File

@@ -5,7 +5,7 @@
         target = "{{this.schema}}.{{this.identifier}}"
     )
 ) }}
+-- depends_on: {{ ref('streamline__complete_blocks') }}
 WITH blocks AS (
     SELECT
@@ -20,10 +20,6 @@ SELECT
 FROM
     {{ ref("streamline__complete_blocks") }}
 {% endif %}
-ORDER BY
-    1 DESC
-LIMIT
-    1000
 )
 SELECT
     ROUND(
@@ -49,7 +45,7 @@ SELECT
             block_number :: STRING
         )
     ),
-    'vault/stg/axelar/node/mainnet'
+    'vault/prod/axelar/node/mainnet'
 ) AS request
 FROM
     blocks

View File

@@ -5,57 +5,87 @@
         target = "{{this.schema}}.{{this.identifier}}"
     )
 ) }}
+-- depends_on: {{ ref('streamline__complete_transactions') }}
+-- depends_on: {{ ref('streamline__complete_tx_counts') }}
 WITH blocks AS (
     SELECT
-        block_number
+        A.block_number,
+        tx_count
     FROM
-        {{ ref("streamline__blocks") }}
+        {{ ref("streamline__complete_tx_counts") }} A
 {% if is_incremental() %}
-EXCEPT
-SELECT
-    block_number
-FROM
-    {{ ref("streamline__complete_transactions") }}
+    LEFT JOIN {{ ref("streamline__complete_transactions") }}
+        b
+        ON A.block_number = b.block_number
+    WHERE
+        b.block_number IS NULL
 {% endif %}
 ORDER BY
     1 DESC
-)
-SELECT
-    ROUND(
-        block_number,
-        -3
-    ) AS partition_key,
-    live.udf_api(
-        'POST',
-        '{service}/{Authentication}',
-        OBJECT_CONSTRUCT(
-            'Content-Type',
-            'application/json'
-        ),
-        OBJECT_CONSTRUCT(
-            'id',
-            block_number,
-            'jsonrpc',
-            '2.0',
-            'method',
-            'tx_search',
-            'params',
-            ARRAY_CONSTRUCT(
-                'tx.height=' || block_number :: STRING,
-                TRUE,
-                '1',
-                --replace with page
-                '100',
-                'asc',
-                FALSE
-            )
-        ),
-        'vault/stg/axelar/node/mainnet'
-    ) AS request
-FROM
-    blocks
-ORDER BY
-    block_number
+LIMIT
+    10
+), numbers AS (
+    -- Recursive CTE to generate numbers. We'll use the maximum tx_count value to limit our recursion.
+    SELECT
+        1 AS n
+    UNION ALL
+    SELECT
+        n + 1
+    FROM
+        numbers
+    WHERE
+        n < (
+            SELECT
+                CEIL(MAX(tx_count) / 100.0)
+            FROM
+                blocks)
+),
+blocks_with_page_numbers AS (
+    SELECT
+        tt.block_number AS block_number,
+        n.n AS page_number
+    FROM
+        blocks tt
+        JOIN numbers n
+        ON n.n <= CASE
+            WHEN tt.tx_count % 100 = 0 THEN tt.tx_count / 100
+            ELSE FLOOR(
+                tt.tx_count / 100
+            ) + 1
+        END
+)
+SELECT
+    ROUND(
+        block_number,
+        -3
+    ) AS partition_key,
+    live.udf_api(
+        'POST',
+        '{service}/{Authentication}',
+        OBJECT_CONSTRUCT(
+            'Content-Type',
+            'application/json'
+        ),
+        OBJECT_CONSTRUCT(
+            'id',
+            block_number,
+            'jsonrpc',
+            '2.0',
+            'method',
+            'tx_search',
+            'params',
+            ARRAY_CONSTRUCT(
+                'tx.height=' || block_number :: STRING,
+                TRUE,
+                page_number :: STRING,
+                '100',
+                'asc',
+                FALSE
+            )
+        ),
+        'vault/prod/axelar/node/mainnet'
+    ) AS request
+FROM
+    blocks_with_page_numbers
+ORDER BY
+    block_number
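Note: the recursion bound and the per-block page assignment are two formulas for the same quantity, pages = CEIL(tx_count / 100). A worked check at tx_count = 250:

SELECT
    CEIL(250 / 100.0) AS recursion_bound, -- 3, caps the numbers CTE
    CASE
        WHEN 250 % 100 = 0 THEN 250 / 100
        ELSE FLOOR(250 / 100) + 1
    END AS pages_for_block -- also 3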

View File

@@ -5,7 +5,7 @@
         target = "{{this.schema}}.{{this.identifier}}"
     )
 ) }}
+-- depends_on: {{ ref('streamline__complete_tx_counts') }}
 WITH blocks AS (
     SELECT
@@ -20,10 +20,6 @@ SELECT
 FROM
     {{ ref("streamline__complete_tx_counts") }}
 {% endif %}
-ORDER BY
-    1 DESC
-LIMIT
-    100
 )
 SELECT
     ROUND(
@@ -54,7 +50,7 @@ SELECT
         FALSE
     )
 ),
-    'vault/stg/axelar/node/mainnet'
+    'vault/prod/axelar/node/mainnet'
 ) AS request
 FROM
     blocks

View File

@@ -4,7 +4,7 @@
 ) }}
 {% if execute %}
-    {% set height = run_query("SELECT streamline.udf_get_chainhead()") %}
+    {% set height = run_query("SELECT live.udf_api( 'POST', '{service}/{Authentication}', OBJECT_CONSTRUCT( 'Content-Type', 'application/json' ), OBJECT_CONSTRUCT( 'id', 0, 'jsonrpc', '2.0', 'method', 'status', 'params', [] ), 'vault/stg/axelar/node/mainnet' ):data:result:sync_info:latest_block_height::INT as block") %}
     {% set block_height = height.columns [0].values() [0] %}
 {% else %}
     {% set block_height = 0 %}
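Note: the macro now reads the chainhead from the node's JSON-RPC status response instead of streamline.udf_get_chainhead(). A sketch of the path it drills into, with an illustrative response body:

SELECT
    PARSE_JSON('{"data": {"result": {"sync_info": {"latest_block_height": "11449803"}}}}') :data :result :sync_info :latest_block_height :: INT AS block -- 11449803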

View File

@@ -1,28 +0,0 @@
-{{ config (
-    materialized = "view",
-    post_hook = if_data_call_function(
-        func = "{{this.schema}}.udf_bulk_get_blocks(object_construct('sql_source', '{{this.identifier}}'))",
-        target = "{{this.schema}}.{{this.identifier}}"
-    )
-) }}
-{% if execute %}
-    {% set height = run_query('SELECT streamline.udf_get_chainhead()') %}
-    {% set block_height = height.columns [0].values() [0] %}
-{% else %}
-    {% set block_height = 0 %}
-{% endif %}
-SELECT
-    height AS block_number
-FROM
-    TABLE(streamline.udtf_get_base_table({{ block_height }}))
-EXCEPT
-SELECT
-    block_number
-FROM
-    {{ ref(
-        "streamline__blocks_history"
-    ) }}
-ORDER BY
-    1 ASC

View File

@@ -1,29 +0,0 @@
-{{ config (
-    materialized = "view",
-    post_hook = if_data_call_function(
-        func = "{{this.schema}}.udf_bulk_get_txs(object_construct('sql_source', '{{this.identifier}}'))",
-        target = "{{this.schema}}.{{this.identifier}}"
-    )
-) }}
-WITH last_3_days AS (
-    SELECT
-        block_number
-    FROM
-        {{ ref("_block_lookback") }}
-)
-SELECT
-    A.block_number,
-    A.tx_count
-FROM
-    {{ ref("streamline__blocks_history") }} A {# LEFT JOIN {{ ref("streamline__txs_history") }}
-    b
-    ON A.block_number = b.block_number #}
-WHERE
-    block_number IN (
-        11449803,
-        11449804
-    )
-ORDER BY
-    1 ASC

View File

@@ -1,54 +0,0 @@
-{{ config (
-    materialized = "incremental",
-    unique_key = "id",
-    cluster_by = "ROUND(block_number, -3)",
-    merge_update_columns = ["id"]
-) }}
-WITH meta AS (
-    SELECT
-        last_modified,
-        file_name
-    FROM
-        TABLE(
-            information_schema.external_table_files(
-                table_name => '{{ source( "bronze", "validators") }}'
-            )
-        ) A
-)
-{% if is_incremental() %},
-max_date AS (
-    SELECT
-        COALESCE(MAX(_INSERTED_TIMESTAMP), '1970-01-01' :: DATE) max_INSERTED_TIMESTAMP
-    FROM
-        {{ this }})
-{% endif %}
-SELECT
-    {{ dbt_utils.generate_surrogate_key(
-        ['block_number']
-    ) }} AS id,
-    block_number,
-    last_modified AS _inserted_timestamp
-FROM
-    {{ source(
-        "bronze",
-        "validators"
-    ) }}
-    JOIN meta b
-    ON b.file_name = metadata$filename
-{% if is_incremental() %}
-WHERE
-    b.last_modified > (
-        SELECT
-            max_INSERTED_TIMESTAMP
-        FROM
-            max_date
-    )
-{% endif %}
-qualify(ROW_NUMBER() over (PARTITION BY id
-ORDER BY
-    _inserted_timestamp DESC)) = 1

View File

@@ -1,28 +0,0 @@
-{{ config (
-    materialized = "view",
-    post_hook = if_data_call_function(
-        func = "{{this.schema}}.udf_bulk_get_validators(object_construct('sql_source', '{{this.identifier}}'))",
-        target = "{{this.schema}}.{{this.identifier}}"
-    )
-) }}
-{% if execute %}
-    {% set height = run_query('SELECT streamline.udf_get_chainhead()') %}
-    {% set block_height = height.columns [0].values() [0] %}
-{% else %}
-    {% set block_height = 0 %}
-{% endif %}
-SELECT
-    height AS block_number
-FROM
-    TABLE(streamline.udtf_get_base_table({{ block_height }}))
-EXCEPT
-SELECT
-    block_number
-FROM
-    {{ ref(
-        "streamline__validators_history"
-    ) }}
-ORDER BY
-    1 ASC