Add streamline models (#136)

* updated and added the REST API client and call

* updated

* increased sql limit

* updated params

* refresh

* updated

* updated the rest api call format

* updated

* updated

* updated
xiuy001 2023-09-26 16:03:26 -04:00 committed by GitHub
parent 59f72a11d1
commit e4c9432131
24 changed files with 812 additions and 2 deletions


@ -0,0 +1,34 @@
name: dbt_run_streamline_blocks_txcount_realtime
run-name: dbt_run_streamline_blocks_txcount_realtime
on:
workflow_dispatch:
schedule:
# Runs "at minute 25 and 55, every hour" (see https://crontab.guru)
- cron: '0,30 * * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
with:
dbt_command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/core/realtime/streamline__txcount_realtime.sql 1+models/streamline/core/realtime/streamline__blocks_realtime.sql
environment: workflow_prod
warehouse: ${{ vars.WAREHOUSE }}
secrets: inherit
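Note: the `1+` prefix in the dbt selector runs each listed model together with its direct parents, so the upstream streamline views (e.g. streamline__blocks) are refreshed before the realtime models' post-hooks invoke the external functions.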


@ -0,0 +1,34 @@
name: dbt_run_streamline_history
run-name: dbt_run_streamline_history
on:
workflow_dispatch:
schedule:
# Runs "every 4 hours" (see https://crontab.guru)
- cron: '0 */4 * * *'
env:
USE_VARS: "${{ vars.USE_VARS }}"
DBT_PROFILES_DIR: "${{ vars.DBT_PROFILES_DIR }}"
DBT_VERSION: "${{ vars.DBT_VERSION }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
with:
dbt_command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/core/realtime/
environment: workflow_prod
warehouse: ${{ vars.WAREHOUSE }}
secrets: inherit


@ -0,0 +1,34 @@
name: dbt_run_streamline_transactions_realtime
run-name: dbt_run_streamline_transactions_realtime
on:
workflow_dispatch:
schedule:
# Runs "at minute 10 and 40, every hour" (see https://crontab.guru)
- cron: '10,40 * * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
with:
dbt_command: |
dbt run --vars '{"STREAMLINE_INVOKE_STREAMS":True}' -m 1+models/streamline/core/realtime/streamline__tx_search_realtime.sql
environment: workflow_prod
warehouse: ${{ vars.WAREHOUSE }}
secrets: inherit


@ -43,6 +43,11 @@ vars:
"dbt_date:time_zone": GMT
"UPDATE_SNOWFLAKE_TAGS": TRUE
OBSERV_FULL_TEST: FALSE
STREAMLINE_INVOKE_STREAMS: False
STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES: False
STREAMLINE_RUN_HISTORY: False
UPDATE_UDFS_AND_SPS: False
UPDATE_SNOWFLAKE_TAGS: True
tests:
+store_failures: true # all tests


@ -0,0 +1,17 @@
{% macro create_aws_osmosis_api() %}
{% if target.name == "prod" %}
{% set sql %}
CREATE API INTEGRATION IF NOT EXISTS aws_osmosis_api
    api_provider = aws_api_gateway
    api_aws_role_arn = 'arn:aws:iam::490041342817:role/osmosis-api-prod-rolesnowflakeudfsAF733095-LHZ76F0KPYOE'
    api_allowed_prefixes = ('https://99iu3zvgd9.execute-api.us-east-1.amazonaws.com/prod/')
    enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% else %}
{% set sql %}
CREATE API INTEGRATION IF NOT EXISTS aws_osmosis_dev_api
    api_provider = aws_api_gateway
    api_aws_role_arn = 'arn:aws:iam::490041342817:role/osmosis-api-dev-rolesnowflakeudfsAF733095-ZCB8M2UY95LZ'
    api_allowed_prefixes = ('https://8lng1cjnel.execute-api.us-east-1.amazonaws.com/dev/')
    enabled = TRUE;
{% endset %}
{% do run_query(sql) %}
{% endif %}
{% endmacro %}
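A setup macro like this is typically executed once per environment from the command line, e.g. `dbt run-operation create_aws_osmosis_api` against the prod target (a usage sketch; the exact invocation depends on this repo's deployment conventions).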


@ -0,0 +1,22 @@
{% macro create_udtf_get_base_table(schema) %}
CREATE
OR REPLACE FUNCTION {{ schema }}.udtf_get_base_table(
max_height INTEGER
) returns TABLE (
height NUMBER
) AS $$ WITH base AS (
SELECT
ROW_NUMBER() over (
ORDER BY
SEQ4()
) AS id
FROM
TABLE(GENERATOR(rowcount => 100000000))
)
SELECT
id AS height
FROM
base
WHERE
id <= max_height $$;
{% endmacro %}
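Once deployed, the UDTF is queried like a table; the streamline__blocks view at the end of this commit does exactly this. A minimal call:

SELECT
    height
FROM
    TABLE(streamline.udtf_get_base_table(1000000));
-- one row per height from 1 to 1000000; the GENERATOR caps the range at 100,000,000 rows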


@ -0,0 +1,179 @@
{% macro decode_logs_history(
start,
stop
) %}
WITH look_back AS (
SELECT
block_number
FROM
{{ ref("_max_block_by_date") }}
qualify ROW_NUMBER() over (
ORDER BY
block_number DESC
) = 1
)
SELECT
l.block_number,
l._log_id,
A.abi AS abi,
OBJECT_CONSTRUCT(
'topics',
l.topics,
'data',
l.data,
'address',
l.contract_address
) AS DATA
FROM
{{ ref("silver__logs") }}
l
INNER JOIN {{ ref("silver__complete_event_abis") }} A
ON A.parent_contract_address = l.contract_address
AND A.event_signature = l.topics[0]:: STRING
AND l.block_number BETWEEN A.start_block
AND A.end_block
WHERE
(
l.block_number BETWEEN {{ start }}
AND {{ stop }}
)
AND l.block_number <= (
SELECT
block_number
FROM
look_back
)
AND _log_id NOT IN (
SELECT
_log_id
FROM
{{ ref("streamline__complete_decode_logs") }}
WHERE
(
block_number BETWEEN {{ start }}
AND {{ stop }}
)
AND block_number <= (
SELECT
block_number
FROM
look_back
)
)
{% endmacro %}
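Usage sketch (block numbers illustrative): a backfill model could call {{ decode_logs_history(16000000, 16010000) }} to emit every log in that range that matches a known event ABI but has no row yet in streamline__complete_decode_logs.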
{% macro streamline_external_table_query(
model,
partition_function,
partition_name,
unique_key
) %}
WITH meta AS (
SELECT
job_created_time AS _inserted_timestamp,
file_name,
{{ partition_function }} AS {{ partition_name }}
FROM
TABLE(
information_schema.external_table_file_registration_history(
start_time => DATEADD('day', -3, CURRENT_TIMESTAMP()),
table_name => '{{ source( "bronze_streamline", model) }}')
) A
)
SELECT
{{ unique_key }},
DATA,
_inserted_timestamp,
MD5(
CAST(
COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text
)
) AS id,
s.{{ partition_name }},
s.value AS VALUE
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.{{ partition_name }} = s.{{ partition_name }}
WHERE
b.{{ partition_name }} = s.{{ partition_name }}
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010'
)
)
{% endmacro %}
{% macro streamline_external_table_FR_query(
model,
partition_function,
partition_name,
unique_key
) %}
WITH meta AS (
SELECT
registered_on AS _inserted_timestamp,
file_name,
{{ partition_function }} AS {{ partition_name }}
FROM
TABLE(
information_schema.external_table_files(
table_name => '{{ source( "bronze_streamline", model) }}'
)
) A
)
SELECT
{{ unique_key }},
DATA,
_inserted_timestamp,
MD5(
CAST(
COALESCE(CAST({{ unique_key }} AS text), '' :: STRING) AS text
)
) AS id,
s.{{ partition_name }},
s.value AS VALUE
FROM
{{ source(
"bronze_streamline",
model
) }}
s
JOIN meta b
ON b.file_name = metadata$filename
AND b.{{ partition_name }} = s.{{ partition_name }}
WHERE
b.{{ partition_name }} = s.{{ partition_name }}
AND (
DATA :error :code IS NULL
OR DATA :error :code NOT IN (
'-32000',
'-32001',
'-32002',
'-32003',
'-32004',
'-32005',
'-32006',
'-32007',
'-32008',
'-32009',
'-32010'
)
)
{% endmacro %}
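Both macros receive the partition expression as a string. As a worked example with a hypothetical registered file name (the real S3 key layout may differ), the partition function used by the bronze models below resolves like this:

SELECT
    CAST(SPLIT_PART(SPLIT_PART('streamline/blocks/12345000_0.json', '/', 3), '_', 1) AS INTEGER) AS _partition_by_block_id;
-- 12345000: the third path segment, truncated at the first underscore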


@ -0,0 +1,37 @@
{% macro create_udf_get_chainhead() %}
{% if target.name == "prod" %}
CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns variant api_integration = aws_osmosis_api AS
'https://99iu3zvgd9.execute-api.us-east-1.amazonaws.com/prod/get_chainhead'
{% else %}
CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_get_chainhead() returns variant api_integration = aws_osmosis_dev_api AS
'https://8lng1cjnel.execute-api.us-east-1.amazonaws.com/dev/get_chainhead'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_json_rpc() %}
{% if target.name == "prod" %}
CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
json OBJECT
) returns ARRAY api_integration = aws_osmosis_api AS
'https://99iu3zvgd9.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_json_rpc'
{% else %}
CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_json_rpc(
json OBJECT
) returns ARRAY api_integration = aws_osmosis_dev_api AS
'https://8lng1cjnel.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_json_rpc'
{%- endif %};
{% endmacro %}
{% macro create_udf_bulk_rest_api() %}
{% if target.name == "prod" %}
CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_rest_api(
json OBJECT
) returns ARRAY api_integration = aws_osmosis_api AS
'https://99iu3zvgd9.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_rest_api'
{% else %}
CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_rest_api(
json OBJECT
) returns ARRAY api_integration = aws_osmosis_dev_api AS
'https://8lng1cjnel.execute-api.us-east-1.amazonaws.com/dev/udf_bulk_rest_api'
{%- endif %};
{% endmacro %}
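Once the integrations and functions exist, the chainhead is a plain scalar call, which is how streamline__blocks (below) anchors its height range:

SELECT
    streamline.udf_get_chainhead();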

macros/utils.sql (new file, 78 lines)

@ -0,0 +1,78 @@
{% macro if_data_call_function(
func,
target
) %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: Calling udf " ~ func ~ " on " ~ target,
True
) }}
{% endif %}
SELECT
{{ func }}
WHERE
EXISTS(
SELECT
1
FROM
{{ target }}
LIMIT
1
)
{% else %}
{% if execute %}
{{ log(
"Running macro `if_data_call_function`: NOOP",
False
) }}
{% endif %}
SELECT
NULL
{% endif %}
{% endmacro %}
{% macro if_data_call_wait() %}
{% if var(
"STREAMLINE_INVOKE_STREAMS"
) %}
{% set query %}
SELECT
1
WHERE
EXISTS(
SELECT
1
FROM
{{ model.schema ~ "." ~ model.alias }}
LIMIT
1
) {% endset %}
{% if execute %}
{% set results = run_query(
query
) %}
{% if results %}
{{ log(
"Waiting...",
info = True
) }}
{% set wait_query %}
SELECT
system$wait(
{{ var(
"WAIT",
600
) }}
) {% endset %}
{% do run_query(wait_query) %}
{% else %}
SELECT
NULL;
{% endif %}
{% endif %}
{% endif %}
{% endmacro %}
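With STREAMLINE_INVOKE_STREAMS set to True, a post-hook built from if_data_call_function renders to roughly the following sketch (the <schema> and <identifier> placeholders resolve from the calling model's target):

SELECT
    <schema>.udf_bulk_json_rpc(object_construct('sql_source', '<identifier>', ...))
WHERE
    EXISTS(
        SELECT 1 FROM <schema>.<identifier> LIMIT 1
    )

so the external function only fires when the view actually produced requests.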


@ -0,0 +1,11 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_FR_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}
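Here `this.identifier.split("_")[-1]` keeps the last underscore-delimited token of the model name, so a view named bronze__streamline_FR_blocks resolves model to "blocks", matching the table name under the bronze_streamline source. The same convention applies to the other bronze views below.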


@ -0,0 +1,11 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_FR_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}


@ -0,0 +1,11 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_FR_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}


@ -0,0 +1,11 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}


@ -0,0 +1,11 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}


@ -0,0 +1,11 @@
{{ config (
materialized = 'view'
) }}
{% set model = this.identifier.split("_") [-1] %}
{{ streamline_external_table_query(
model,
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 3), '_', 1) AS INTEGER)",
partition_name = "_partition_by_block_id",
unique_key = "block_number"
) }}


@ -18,12 +18,16 @@ sources:
- name: blockchain
- name: bronze_streamline
database: streamline
schema: osmosis
schema: |
{{ "OSMOSIS_DEV" if var("STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES", False) else "OSMOSIS" }}
tables:
- name: validator_metadata_api
- name: asset_metadata_api
- name: balances_api
- name: pool_balances_api
- name: blocks
- name: transactions
- name: txcount
- name: crosschain_silver
database: "{{ 'crosschain' if target.database == 'OSMOSIS' else 'crosschain_dev' }}"
schema: silver
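Flipping the var at run time redirects every bronze_streamline source to the dev schema, e.g. `dbt run --vars '{"STREAMLINE_USE_DEV_FOR_EXTERNAL_TABLES": True}'` makes {{ source("bronze_streamline", "blocks") }} resolve to streamline.OSMOSIS_DEV.blocks.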


@ -0,0 +1,27 @@
{{ config (
materialized = "ephemeral",
unique_key = "block_id",
) }}
WITH base AS (
SELECT
block_timestamp :: DATE AS block_date,
MAX(block_id) as block_number
FROM
{{ ref("silver__blocks") }}
GROUP BY
block_timestamp :: DATE
)
SELECT
block_date,
block_number
FROM
base
WHERE
block_date <> (
SELECT
MAX(block_date)
FROM
base
)
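As a worked example: if silver__blocks currently ends partway through 2023-09-26, this model emits one (block_date, block_number) row per fully elapsed day and drops 2023-09-26 itself, so the look_back CTE in decode_logs_history reads the max block of the last complete day as a safe upper bound.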


@ -0,0 +1,31 @@
-- depends_on: {{ ref('bronze__streamline_blocks') }}
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["id"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
) }}
SELECT
id,
block_number,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_blocks') }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_FR_blocks') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1


@ -0,0 +1,32 @@
-- depends_on: {{ ref('bronze__streamline_transactions') }}
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["id"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
) }}
SELECT
id,
block_number,
VALUE :metadata :request :params ['pagination.offset'] ::STRING AS pagination_offset,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_transactions') }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_FR_transactions') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY id, pagination_offset
ORDER BY
_inserted_timestamp DESC)) = 1


@ -0,0 +1,31 @@
-- depends_on: {{ ref('bronze__streamline_txcount') }}
{{ config (
materialized = "incremental",
unique_key = "id",
cluster_by = "ROUND(block_number, -3)",
merge_update_columns = ["id"],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION on equality(id)"
) }}
SELECT
id,
block_number,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__streamline_txcount') }}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) _inserted_timestamp
FROM
{{ this }}
)
{% else %}
{{ ref('bronze__streamline_FR_txcount') }}
{% endif %}
qualify(ROW_NUMBER() over (PARTITION BY id
ORDER BY
_inserted_timestamp DESC)) = 1


@ -0,0 +1,35 @@
{{ config (
materialized = "view",
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'blocks', 'sql_limit', {{var('sql_limit','2000000')}}, 'producer_batch_size', {{var('producer_batch_size','2000000')}}, 'worker_batch_size', {{var('worker_batch_size','1000')}}, 'batch_call_limit', {{var('batch_call_limit','100')}}, 'call_type', 'batch'))",
target = "{{this.schema}}.{{this.identifier}}"
)
) }}
WITH blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_blocks") }}
)
SELECT
block_number,
PARSE_JSON(
CONCAT(
'{"jsonrpc": "2.0",',
'"method": "block", "params":["',
block_number :: STRING,
'"],"id":"',
block_number :: STRING,
'"}'
)
) AS request
FROM
blocks
ORDER BY block_number DESC
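For a block_number of 12345678, the CONCAT above yields the following JSON-RPC body (the height is passed as a string and doubles as the request id):

{"jsonrpc": "2.0","method": "block", "params":["12345678"],"id":"12345678"}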


@ -0,0 +1,86 @@
{{ config (
materialized = "view",
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_rest_api(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'transactions', 'sql_limit', {{var('sql_limit','2000000')}}, 'producer_batch_size', {{var('producer_batch_size','2000')}}, 'worker_batch_size', {{var('worker_batch_size','200')}}, 'exploded_key', '[\"txs;tx_responses\"]'))",
target = "{{this.schema}}.{{this.identifier}}"
)
) }}
WITH blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__complete_txcount") }}
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_transactions") }}
),
transactions_counts_by_block AS (
SELECT
tc.block_number,
tc.data :: INTEGER AS txcount
FROM
{{ ref("bronze__streamline_FR_txcount") }}
tc
INNER JOIN blocks b
ON tc.block_number = b.block_number
),
numbers AS (
-- Recursive CTE to generate numbers. We'll use the maximum txcount value to limit our recursion.
SELECT
1 AS n
UNION ALL
SELECT
n + 1
FROM
numbers
WHERE
n < (
SELECT
CEIL(MAX(txcount) / 100.0)
FROM
transactions_counts_by_block)
),
blocks_with_page_numbers AS (
SELECT
tt.block_number AS block_number,
ROUND((n.n -1) * 100) :: STRING AS pagination_offset
FROM
transactions_counts_by_block tt
JOIN numbers n
ON n.n <= CASE
WHEN tt.txcount % 100 = 0 THEN tt.txcount / 100
ELSE FLOOR(
tt.txcount / 100
) + 1
END
),
blocks_with_page_numbers_to_read AS (
SELECT
block_number,
pagination_offset
FROM
blocks_with_page_numbers
EXCEPT
SELECT
block_number,
pagination_offset
FROM
{{ ref("streamline__complete_transactions") }}
)
SELECT
block_number,
ARRAY_CONSTRUCT(
'GET',
'/cosmos/tx/v1beta1/txs',
PARSE_JSON('{}'),
PARSE_JSON(CONCAT('{"params":{', '"events":"tx.height=', block_number :: STRING, '",', '"pagination.limit":"100"', ',', '"pagination.offset":"', pagination_offset, '"},"id":"', block_number :: STRING, '"}')),
''
) AS request
FROM
blocks_with_page_numbers_to_read
ORDER BY
block_number DESC
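Worked example of the pagination math: a block with txcount = 250 needs CEIL(250 / 100.0) = 3 pages (250 % 100 <> 0, so FLOOR(250 / 100) + 1 = 3), producing pagination_offset values '0', '100', and '200'; a block with exactly 200 transactions gets 200 / 100 = 2 pages, offsets '0' and '100'.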


@ -0,0 +1,41 @@
{{ config (
materialized = "view",
post_hook = if_data_call_function(
func = "{{this.schema}}.udf_bulk_json_rpc(object_construct('sql_source', '{{this.identifier}}', 'external_table', 'txcount', 'sql_limit', {{var('sql_limit','2000000')}}, 'producer_batch_size', {{var('producer_batch_size','10000')}}, 'worker_batch_size', {{var('worker_batch_size','1000')}}, 'batch_call_limit', {{var('batch_call_limit','100')}}, 'exploded_key', '[\"result\", \"total_count\"]', 'call_type', 'batch'))",
target = "{{this.schema}}.{{this.identifier}}"
)
) }}
WITH blocks AS (
SELECT
block_number
FROM
{{ ref("streamline__blocks") }}
EXCEPT
SELECT
block_number
FROM
{{ ref("streamline__complete_txcount") }}
)
SELECT
block_number,
PARSE_JSON(
CONCAT(
'{"jsonrpc": "2.0",',
'"method": "tx_search", "params":["',
'tx.height=',
block_number :: STRING,
'",',
TRUE,
',',
'"1",',
'"1",',
'"asc"',
'],"id":"',
block_number :: STRING,
'"}'
)
) AS request
FROM
blocks
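Snowflake renders the bare TRUE as the text 'true', so for block 12345678 the request is a tx_search asking for a single proved result per page; only result.total_count is kept, per the exploded_key in the post-hook above:

{"jsonrpc": "2.0","method": "tx_search", "params":["tx.height=12345678",true,"1","1","asc"],"id":"12345678"}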


@ -0,0 +1,17 @@
{{ config (
materialized = "view",
tags = ['streamline_view']
) }}
{% if execute %}
{% set height = run_query('SELECT streamline.udf_get_chainhead()') %}
{% set block_height = height.columns[0].values()[0] %}
{% else %}
{% set block_height = 0 %}
{% endif %}
SELECT
height as block_number
FROM
TABLE(streamline.udtf_get_base_table({{block_height}}))