sample format

Austin 2025-11-10 14:51:49 -05:00
parent fe7e67c194
commit 256b321030
13 changed files with 208 additions and 6 deletions

View File

@@ -15,6 +15,7 @@ sources:
- name: defillama_perp_metrics
- name: tt_projects
- name: tt_metrics
+ - name: tt_project_metrics
- name: tokenflow_eth
database: flipside_prod_db
schema: tokenflow_eth

View File

@@ -1,6 +1,6 @@
{{ config (
materialized = 'view',
- tags = ['tt_streamline']
+ tags = ['tt_streamline_bronze']
) }}
{{ streamline_external_table_query_v2(
model = 'tt_metrics',

View File

@@ -1,6 +1,6 @@
{{ config (
materialized = 'view',
- tags = ['tt_streamline']
+ tags = ['tt_streamline_bronze']
) }}
{{ streamline_external_table_FR_query_v2(
model = 'tt_metrics',

View File

@@ -0,0 +1,9 @@
{{ config (
materialized = 'view',
tags = ['tt_streamline_bronze']
) }}
{{ streamline_external_table_query_v2(
model = 'tt_project_metrics',
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)",
partition_name = "partition_key"
) }}
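
Note on partition_function above: it takes the fourth '/'-delimited segment of the staged file name, then the first '_'-delimited token inside it, and casts that to an integer, which the view exposes as partition_key (per partition_name). A minimal sketch of what the expression extracts, assuming a hypothetical staged path of the form .../tt_project_metrics/<epoch>_0.json (the real path layout is not shown in this commit):

SELECT CAST(SPLIT_PART(SPLIT_PART('stage/tt/tt_project_metrics/1762732800_0.json', '/', 4), '_', 1) AS INTEGER) AS partition_key;
-- returns 1762732800, i.e. the epoch-second value the request models write as partition_key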

View File

@@ -0,0 +1,9 @@
{{ config (
materialized = 'view',
tags = ['tt_streamline_bronze']
) }}
{{ streamline_external_table_FR_query_v2(
model = 'tt_project_metrics',
partition_function = "CAST(SPLIT_PART(SPLIT_PART(file_name, '/', 4), '_', 1) AS INTEGER)",
partition_name = "partition_key"
) }}

View File

@@ -1,6 +1,6 @@
{{ config (
materialized = 'view',
- tags = ['tt_streamline']
+ tags = ['tt_streamline_bronze']
) }}
{{ streamline_external_table_query_v2(
model = 'tt_projects',

View File

@@ -1,6 +1,6 @@
{{ config (
materialized = 'view',
- tags = ['tt_streamline']
+ tags = ['tt_streamline_bronze']
) }}
{{ streamline_external_table_FR_query_v2(
model = 'tt_projects',

View File

@@ -0,0 +1,71 @@
{{ config(
materialized = 'incremental',
unique_key = 'fact_project_metrics_id',
tags = ['tt']
) }}
SELECT
metric_timestamp,
project_id,
data:project_name::STRING AS project_name,
ifnull(data:active_addresses_daily::STRING::INT, 0) AS active_addresses_daily,
ifnull(data:active_addresses_monthly::STRING::INT, 0) AS active_addresses_monthly,
ifnull(data:active_addresses_weekly::STRING::INT, 0) AS active_addresses_weekly,
ifnull(data:active_developers::STRING::INT, 0) AS active_developers,
ifnull(data:active_loans::STRING::FLOAT, 0) AS active_loans,
ifnull(data:afpu::STRING::FLOAT, 0) AS afpu,
ifnull(data:arpu::STRING::FLOAT, 0) AS arpu,
ifnull(data:asset_market_cap_circulating::STRING::INT, 0) AS asset_market_cap_circulating,
ifnull(data:code_commits::STRING::INT, 0) AS code_commits,
ifnull(data:earnings::STRING::FLOAT, 0) AS earnings,
ifnull(data:expenses::STRING::FLOAT, 0) AS expenses,
ifnull(data:fees::STRING::FLOAT, 0) AS fees,
ifnull(data:fees_supply_side::STRING::FLOAT, 0) AS fees_supply_side,
ifnull(data:gas_used::STRING::FLOAT, 0) AS gas_used,
ifnull(data:market_cap_circulating::STRING::FLOAT, 0) AS market_cap_circulating,
ifnull(data:market_cap_fully_diluted::STRING::FLOAT, 0) AS market_cap_fully_diluted,
ifnull(data:net_deposits::STRING::FLOAT, 0) AS net_deposits,
ifnull(data:outstanding_supply::STRING::FLOAT, 0) AS outstanding_supply,
ifnull(data:pf_circulating::STRING::FLOAT, 0) AS pf_circulating,
ifnull(data:pf_fully_diluted::STRING::FLOAT, 0) AS pf_fully_diluted,
ifnull(data:price::STRING::FLOAT, 0) AS price,
ifnull(data:ps_circulating::STRING::FLOAT, 0) AS ps_circulating,
ifnull(data:ps_fully_diluted::STRING::FLOAT, 0) AS ps_fully_diluted,
ifnull(data:revenue::STRING::FLOAT, 0) AS revenue,
ifnull(data:stablecoin_dau::STRING::INT, 0) AS stablecoin_dau,
ifnull(data:stablecoin_holders::STRING::INT, 0) AS stablecoin_holders,
ifnull(data:stablecoin_mau::STRING::INT, 0) AS stablecoin_mau,
ifnull(data:stablecoin_mints::STRING::INT, 0) AS stablecoin_mints,
ifnull(data:stablecoin_redemptions::STRING::INT, 0) AS stablecoin_redemptions,
ifnull(data:stablecoin_transfer_count::STRING::INT, 0) AS stablecoin_transfer_count,
ifnull(data:stablecoin_transfer_volume::STRING::INT, 0) AS stablecoin_transfer_volume,
ifnull(data:stablecoin_wau::STRING::INT, 0) AS stablecoin_wau,
ifnull(data:token_incentives::STRING::FLOAT, 0) AS token_incentives,
ifnull(data:token_supply_circulating::STRING::INT, 0) AS token_supply_circulating,
ifnull(data:token_supply_maximum::STRING::INT, 0) AS token_supply_maximum,
ifnull(data:token_trading_volume::STRING::FLOAT, 0) AS token_trading_volume,
ifnull(data:token_turnover_circulating::STRING::FLOAT, 0) AS token_turnover_circulating,
ifnull(data:token_turnover_fully_diluted::STRING::FLOAT, 0) AS token_turnover_fully_diluted,
ifnull(data:tokenholders::STRING::INT, 0) AS tokenholders,
ifnull(data:transaction_count_contracts::STRING::INT, 0) AS transaction_count_contracts,
ifnull(data:treasury::STRING::FLOAT, 0) AS treasury,
ifnull(data:treasury_net::STRING::FLOAT, 0) AS treasury_net,
ifnull(data:tvl::STRING::FLOAT, 0) AS tvl,
ifnull(data:user_dau::STRING::INT, 0) AS user_dau,
ifnull(data:user_mau::STRING::INT, 0) AS user_mau,
ifnull(data:user_wau::STRING::INT, 0) AS user_wau,
data as project_metrics_data,
sysdate() as inserted_timestamp,
sysdate() as modified_timestamp,
'{{ invocation_id }}' as _invocation_id,
{{ dbt_utils.generate_surrogate_key(
['project_id','metric_timestamp']
) }} AS fact_project_metrics_id
FROM {{ ref('silver__tt_project_metrics') }}
WHERE 1=1
{% if is_incremental() %}
AND modified_timestamp > (
SELECT coalesce(MAX(modified_timestamp), '2000-01-01') FROM {{ this }}
)
{% endif %}
QUALIFY ROW_NUMBER() OVER (PARTITION BY fact_project_metrics_id ORDER BY modified_timestamp DESC) = 1
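
The new gold model loads incrementally on modified_timestamp and dedupes on the surrogate key via the QUALIFY above, so fact_project_metrics_id should be unique after every run. A hypothetical spot check (the model's file name is not shown here, so the relation name below is assumed; substitute the actual database and schema):

SELECT fact_project_metrics_id, COUNT(*) AS dupes
FROM token_terminal.fact_project_metrics  -- assumed relation name
GROUP BY 1
HAVING COUNT(*) > 1;  -- expect zero rows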

View File

@@ -0,0 +1,35 @@
-- depends_on: {{ ref('bronze__tt_project_metrics') }}
{{ config(
materialized = 'incremental',
unique_key = 'tt_project_metrics_id',
cluster_by = ['metric_timestamp::DATE'],
tags = ['tt']
) }}
SELECT
value:"RUN_TIMESTAMP"::INT AS run_timestamp,
to_timestamp(run_timestamp)::date AS run_date,
partition_key,
DATA,
DATA:project_id::STRING AS project_id,
TO_TIMESTAMP(DATA:timestamp::STRING) AS metric_timestamp,
_inserted_timestamp,
sysdate() as inserted_timestamp,
sysdate() as modified_timestamp,
'{{ invocation_id }}' as _invocation_id,
{{ dbt_utils.generate_surrogate_key(
['project_id','metric_timestamp']
) }} AS tt_project_metrics_id
from
{% if is_incremental() %}
{{ ref('bronze__tt_project_metrics') }}
where _inserted_timestamp > (
select coalesce(max(_inserted_timestamp), '2000-01-01') from {{ this }}
)
{% else %}
{{ ref('bronze__tt_project_metrics_FR') }}
{% endif %}
QUALIFY(
ROW_NUMBER() OVER (PARTITION BY tt_project_metrics_id ORDER BY _inserted_timestamp DESC)
) = 1
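
The FROM clause above is switched at compile time: incremental runs read bronze__tt_project_metrics and filter on _inserted_timestamp, while the first build or a full refresh reads bronze__tt_project_metrics_FR (presumably the full-reload variant of the same external table). A rough sketch of the two compiled shapes, with illustrative relation names (the real names depend on the target database and schema):

-- incremental run: is_incremental() is true
FROM bronze.tt_project_metrics
WHERE _inserted_timestamp > (SELECT COALESCE(MAX(_inserted_timestamp), '2000-01-01') FROM silver.tt_project_metrics)

-- first build / full refresh: is_incremental() is false
FROM bronze.tt_project_metrics_FR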

View File

@@ -0,0 +1,38 @@
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"tt_project_metrics",
"sql_limit" :"10000",
"producer_batch_size" :"10000",
"worker_batch_size" :"1000",
"async_concurrent_requests" :"10",
"sql_source" :"{{this.identifier}}",
"exploded_key": tojson(['data'])
}
),
tags = ['tt_streamline_history']
) }}
with relevant_projects as (
select
project_id
from {{ ref('token_terminal__dim_projects') }}
)
SELECT
project_id,
date_part('epoch_second', sysdate()) as run_timestamp,
date_part('epoch_second', sysdate()::DATE) AS partition_key,
{{ target.database }}.live.udf_api(
'GET',
'https://api.tokenterminal.com/v2/projects/' || project_id || '/metrics?order_direction=desc&include_project_specific_metrics=true',
OBJECT_CONSTRUCT(
'Authorization', 'Bearer {api_key}',
'fsc-quantum-state', 'streamline'
),
{},
'Vault/prod/external/token_terminal'
) AS request
from relevant_projects
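
This view builds one GET request row per Token Terminal project, and the post_hook hands it to streamline.udf_bulk_rest_api_v2, which lands the responses in the tt_project_metrics external table (the exact landing mechanics live in fsc_utils and the streamline UDF, not in this commit). The two epoch columns it emits are the exact run time and the run date; the latter is what the bronze partition_function parses back out of the file names. A minimal sketch of those two expressions on their own (values depend on when you run it):

SELECT
    DATE_PART('epoch_second', SYSDATE()) AS run_timestamp,        -- exact run time, in epoch seconds
    DATE_PART('epoch_second', SYSDATE()::DATE) AS partition_key;  -- midnight of the run date, in epoch seconds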

View File

@@ -12,7 +12,7 @@
"exploded_key": tojson(['data'])
}
),
- tags = ['tt_streamline']
+ tags = ['tt_streamline_realtime']
) }}
SELECT

View File

@@ -0,0 +1,39 @@
{{ config (
materialized = "view",
post_hook = fsc_utils.if_data_call_function_v2(
func = 'streamline.udf_bulk_rest_api_v2',
target = "{{this.schema}}.{{this.identifier}}",
params ={ "external_table" :"tt_project_metrics",
"sql_limit" :"10000",
"producer_batch_size" :"10000",
"worker_batch_size" :"1000",
"async_concurrent_requests" :"10",
"sql_source" :"{{this.identifier}}",
"exploded_key": tojson(['data'])
}
),
tags = ['tt_streamline_realtime']
) }}
with relevant_projects as (
select
project_id
from {{ ref('token_terminal__dim_projects') }}
where not is_archived -- only get non-archived projects that are getting new data
)
SELECT
project_id,
date_part('epoch_second', sysdate()) as run_timestamp,
date_part('epoch_second', sysdate()::DATE) AS partition_key,
{{ target.database }}.live.udf_api(
'GET',
'https://api.tokenterminal.com/v2/projects/' || project_id || '/metrics?order_direction=desc&include_project_specific_metrics=true&start=' || dateadd('day', -5, sysdate()::date),
OBJECT_CONSTRUCT(
'Authorization', 'Bearer {api_key}',
'fsc-quantum-state', 'streamline'
),
{},
'Vault/prod/external/token_terminal'
) AS request
from relevant_projects
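
This realtime variant only requests non-archived projects and adds a start parameter five days back from the run date; the DATE is concatenated straight into the URL, so it is rendered with the session's DATE_OUTPUT_FORMAT (YYYY-MM-DD by default). A minimal sketch of what the concatenation produces, with an assumed project_id and the commit date standing in for the run date:

SELECT
    'https://api.tokenterminal.com/v2/projects/' || 'uniswap'  -- 'uniswap' is an assumed project_id
    || '/metrics?order_direction=desc&include_project_specific_metrics=true&start='
    || DATEADD('day', -5, '2025-11-10'::DATE) AS example_url;
-- -> https://api.tokenterminal.com/v2/projects/uniswap/metrics?order_direction=desc&include_project_specific_metrics=true&start=2025-11-05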

View File

@@ -12,7 +12,7 @@
"exploded_key": tojson(['data'])
}
),
- tags = ['tt_streamline']
+ tags = ['tt_streamline_realtime']
) }}
SELECT