Austin 2025-11-10 14:01:50 -05:00
parent fbc539e02b
commit fe7e67c194
11 changed files with 221 additions and 0 deletions

View File

@ -13,6 +13,8 @@ sources:
  - name: defillama_stablecoin_metrics
  - name: defillama_protocols
  - name: defillama_perp_metrics
  - name: tt_projects
  - name: tt_metrics
  - name: tokenflow_eth
    database: flipside_prod_db
    schema: tokenflow_eth
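  # The two new entries register the Token Terminal external tables as dbt
  # sources; like the existing defillama_* entries they declare only a name, so
  # database and schema are presumably resolved from this file's defaults (or
  # dbt's own source defaults) rather than being set per entry.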

View File

@ -0,0 +1,9 @@
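-- Thin bronze view over the tt_metrics external table populated by streamline.
-- The partition_function below is assumed to recover the epoch-seconds
-- partition_key that the request model in this commit writes for each run
-- (fourth '/'-separated segment of the staged file path, text before the
-- first '_').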
{{ config(
    materialized = 'view',
    tags = ['tt_streamline']
) }}
{{ streamline_external_table_query_v2(
    model = 'tt_metrics',
    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)",
    partition_name = "partition_key"
) }}

View File

@ -0,0 +1,9 @@
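-- "FR" (presumably full-refresh) variant of the bronze__tt_metrics view; the FR
-- macro is assumed to scan the complete file history rather than only recent
-- partitions, and it is what silver__tt_metrics reads on non-incremental runs.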
{{ config(
    materialized = 'view',
    tags = ['tt_streamline']
) }}
{{ streamline_external_table_FR_query_v2(
    model = 'tt_metrics',
    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)",
    partition_name = "partition_key"
) }}

View File

@ -0,0 +1,9 @@
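-- Same pattern as the tt_metrics bronze view, pointed at the tt_projects
-- external table.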
{{ config(
    materialized = 'view',
    tags = ['tt_streamline']
) }}
{{ streamline_external_table_query_v2(
    model = 'tt_projects',
    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)",
    partition_name = "partition_key"
) }}

View File

@ -0,0 +1,9 @@
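-- "FR" counterpart of the tt_projects bronze view; silver__tt_projects reads it
-- on non-incremental runs.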
{{ config(
    materialized = 'view',
    tags = ['tt_streamline']
) }}
{{ streamline_external_table_FR_query_v2(
    model = 'tt_projects',
    partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)",
    partition_name = "partition_key"
) }}

View File

@ -0,0 +1,25 @@
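-- Dimension table of Token Terminal metric metadata. Incremental merge on the
-- surrogate dim_metrics_id (hashed from metric_id); the trailing QUALIFY keeps
-- only the latest row per key by modified_timestamp.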
{{ config(
    materialized = 'incremental',
    unique_key = 'dim_metrics_id',
    tags = ['tt']
) }}
SELECT
    data:metric_id::STRING AS metric_id,
    data:metric_name::STRING AS metric_name,
    data:description::STRING AS description,
    data:url::STRING AS url,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id,
    {{ dbt_utils.generate_surrogate_key(
        ['metric_id']
    ) }} AS dim_metrics_id
FROM {{ ref('silver__tt_metrics') }}
WHERE 1=1
{% if is_incremental() %}
AND modified_timestamp > (
    SELECT COALESCE(MAX(modified_timestamp), '2000-01-01') FROM {{ this }}
)
{% endif %}
QUALIFY ROW_NUMBER() OVER (PARTITION BY dim_metrics_id ORDER BY modified_timestamp DESC) = 1

View File

@ -0,0 +1,30 @@
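-- Dimension table of Token Terminal project metadata (one row per project_id);
-- note that products is kept as a raw ARRAY rather than being flattened.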
{{ config(
    materialized = 'incremental',
    unique_key = 'dim_projects_id',
    tags = ['tt']
) }}
SELECT
    data:project_id::STRING AS project_id,
    data:name::STRING AS project_name,
    data:symbol::STRING AS project_symbol,
    data:archived_reason::STRING AS archived_reason,
    data:coingecko_id::STRING AS coingecko_id,
    data:is_archived::BOOLEAN AS is_archived,
    data:maintenance_reason::STRING AS maintenance_reason,
    data:products::ARRAY AS products,
    data:url::STRING AS url,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id,
    {{ dbt_utils.generate_surrogate_key(
        ['project_id']
    ) }} AS dim_projects_id
FROM {{ ref('silver__tt_projects') }}
WHERE 1=1
{% if is_incremental() %}
AND modified_timestamp > (
    SELECT COALESCE(MAX(modified_timestamp), '2000-01-01') FROM {{ this }}
)
{% endif %}
QUALIFY ROW_NUMBER() OVER (PARTITION BY dim_projects_id ORDER BY modified_timestamp DESC) = 1

View File

@ -0,0 +1,34 @@
-- depends_on: {{ ref('bronze__tt_metrics') }}
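-- Incremental runs read only new files from bronze__tt_metrics, watermarked on
-- _inserted_timestamp; full refreshes rebuild from bronze__tt_metrics_FR. The
-- depends_on hint above is needed because that ref only appears inside the
-- is_incremental() branch, so dbt would not otherwise pick up the dependency
-- when the branch is not rendered. Rows are deduplicated on
-- (metric_id, run_timestamp) via the surrogate tt_metrics_id.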
{{ config(
    materialized = 'incremental',
    unique_key = 'tt_metrics_id',
    cluster_by = ['run_date'],
    tags = ['tt']
) }}
SELECT
    VALUE:"RUN_TIMESTAMP"::INT AS run_timestamp,
    TO_TIMESTAMP(run_timestamp)::DATE AS run_date,
    partition_key,
    DATA,
    DATA:metric_id::STRING AS metric_id,
    _inserted_timestamp,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id,
    {{ dbt_utils.generate_surrogate_key(
        ['metric_id', 'run_timestamp']
    ) }} AS tt_metrics_id
FROM
{% if is_incremental() %}
{{ ref('bronze__tt_metrics') }}
WHERE _inserted_timestamp > (
    SELECT COALESCE(MAX(_inserted_timestamp), '2000-01-01') FROM {{ this }}
)
{% else %}
{{ ref('bronze__tt_metrics_FR') }}
{% endif %}
QUALIFY(
    ROW_NUMBER() OVER (PARTITION BY tt_metrics_id ORDER BY _inserted_timestamp DESC)
) = 1

View File

@ -0,0 +1,34 @@
-- depends_on: {{ ref('bronze__tt_projects') }}
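-- Same load pattern as silver__tt_metrics, keyed on (project_id, run_timestamp).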
{{ config(
    materialized = 'incremental',
    unique_key = 'tt_projects_id',
    cluster_by = ['run_date'],
    tags = ['tt']
) }}
SELECT
    VALUE:"RUN_TIMESTAMP"::INT AS run_timestamp,
    TO_TIMESTAMP(run_timestamp)::DATE AS run_date,
    partition_key,
    DATA,
    DATA:project_id::STRING AS project_id,
    _inserted_timestamp,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id,
    {{ dbt_utils.generate_surrogate_key(
        ['project_id', 'run_timestamp']
    ) }} AS tt_projects_id
FROM
{% if is_incremental() %}
{{ ref('bronze__tt_projects') }}
WHERE _inserted_timestamp > (
    SELECT COALESCE(MAX(_inserted_timestamp), '2000-01-01') FROM {{ this }}
)
{% else %}
{{ ref('bronze__tt_projects_FR') }}
{% endif %}
QUALIFY(
    ROW_NUMBER() OVER (PARTITION BY tt_projects_id ORDER BY _inserted_timestamp DESC)
) = 1

View File

@ -0,0 +1,30 @@
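-- Streamline request view: each run emits a single row whose request column
-- calls live.udf_api against the Token Terminal /v2/metrics endpoint; the
-- {api_key} placeholder is presumably filled from the Vault path at call time.
-- run_timestamp is the epoch second of the run and partition_key the epoch
-- second of the run date, which the bronze partition_function appears to parse
-- back out of the staged file name. The post_hook hands this view to
-- streamline.udf_bulk_rest_api_v2, configured to land the response (exploded on
-- its 'data' key) in the tt_metrics external table.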
{{ config(
    materialized = "view",
    post_hook = fsc_utils.if_data_call_function_v2(
        func = 'streamline.udf_bulk_rest_api_v2',
        target = "{{this.schema}}.{{this.identifier}}",
        params = {
            "external_table": "tt_metrics",
            "sql_limit": "1",
            "producer_batch_size": "1",
            "worker_batch_size": "1",
            "async_concurrent_requests": "1",
            "sql_source": "{{this.identifier}}",
            "exploded_key": tojson(['data'])
        }
    ),
    tags = ['tt_streamline']
) }}
SELECT
    DATE_PART('epoch_second', SYSDATE()) AS run_timestamp,
    DATE_PART('epoch_second', SYSDATE()::DATE) AS partition_key,
    {{ target.database }}.live.udf_api(
        'GET',
        'https://api.tokenterminal.com/v2/metrics',
        OBJECT_CONSTRUCT(
            'Authorization', 'Bearer {api_key}',
            'fsc-quantum-state', 'streamline'
        ),
        {},
        'Vault/prod/external/token_terminal'
    ) AS request

View File

@ -0,0 +1,30 @@
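-- Same request pattern as the metrics model, targeting /v2/projects and the
-- tt_projects external table.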
{{ config(
    materialized = "view",
    post_hook = fsc_utils.if_data_call_function_v2(
        func = 'streamline.udf_bulk_rest_api_v2',
        target = "{{this.schema}}.{{this.identifier}}",
        params = {
            "external_table": "tt_projects",
            "sql_limit": "1",
            "producer_batch_size": "1",
            "worker_batch_size": "1",
            "async_concurrent_requests": "1",
            "sql_source": "{{this.identifier}}",
            "exploded_key": tojson(['data'])
        }
    ),
    tags = ['tt_streamline']
) }}
SELECT
    DATE_PART('epoch_second', SYSDATE()) AS run_timestamp,
    DATE_PART('epoch_second', SYSDATE()::DATE) AS partition_key,
    {{ target.database }}.live.udf_api(
        'GET',
        'https://api.tokenterminal.com/v2/projects',
        OBJECT_CONSTRUCT(
            'Authorization', 'Bearer {api_key}',
            'fsc-quantum-state', 'streamline'
        ),
        {},
        'Vault/prod/external/token_terminal'
    ) AS request