AN-2597/defillama-pipeline (#11)

This commit is contained in:
drethereum 2023-02-13 14:01:29 -07:00 committed by GitHub
parent 56d47f7d4a
commit 3253a52552
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
55 changed files with 12042 additions and 95 deletions

View File

@ -6,16 +6,16 @@ on:
- "main"
env:
DBT_PROFILES_DIR: ${{ secrets.DBT_PROFILES_DIR }}
DBT_PROFILES_DIR: ${{ vars.DBT_PROFILES_DIR }}
ACCOUNT: "${{ secrets.ACCOUNT }}"
ROLE: "${{ secrets.ROLE }}"
USER: "${{ secrets.USER }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ secrets.REGION }}"
DATABASE: "${{ secrets.DATABASE }}"
WAREHOUSE: "${{ secrets.WAREHOUSE }}"
SCHEMA: "${{ secrets.SCHEMA }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
@ -36,7 +36,7 @@ jobs:
- name: install dependencies
run: |
pip install dbt-snowflake==${{ secrets.DBT_VERSION }}
pip install dbt-snowflake==${{ vars.DBT_VERSION }}
dbt deps
- name: checkout docs branch
run: |

View File

@ -29,25 +29,40 @@ on:
required: true
env:
DBT_PROFILES_DIR: ${{ secrets.DBT_PROFILES_DIR }}
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ secrets.ACCOUNT }}"
ROLE: "${{ secrets.ROLE }}"
USER: "${{ secrets.USER }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ secrets.REGION }}"
DATABASE: "${{ secrets.DATABASE }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ inputs.warehouse }}"
SCHEMA: "${{ secrets.SCHEMA }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
called_workflow_template:
uses: FlipsideCrypto/analytics-workflow-templates/.github/workflows/dbt_run_template.yml@main
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_${{ inputs.environment }}
strategy:
matrix:
command: ${{fromJson(inputs.dbt_command)}}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
dbt_command: ${{ inputs.dbt_command }}
environment: workflow_${{ inputs.environment }}
warehouse: ${{ inputs.warehouse }}
secrets: inherit
python-version: "3.7.x"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
${{ matrix.command }}

44
.github/workflows/dbt_run_daily.yml vendored Normal file
View File

@ -0,0 +1,44 @@
# GitHub Actions workflow: runs the DefiLlama-tagged dbt models once per day.
name: dbt_run_scheduled_daily
run-name: dbt_run_scheduled_daily
on:
  workflow_dispatch:
  schedule:
    # Runs "at 08:00 UTC" every day (see https://crontab.guru)
    - cron: '0 8 * * *'
env:
  # Snowflake connection settings: non-sensitive values come from repository
  # variables (vars), only the password from secrets.
  DBT_PROFILES_DIR: ${{ vars.DBT_PROFILES_DIR }}
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
  # At most one run of this workflow at a time.
  group: ${{ github.workflow }}
jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v1
        with:
          python-version: "3.7.x"
      - name: install dependencies
        run: |
          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m tag:defillama

View File

@ -8,16 +8,16 @@ on:
- cron: '0 8 * * *'
env:
DBT_PROFILES_DIR: ${{ secrets.DBT_PROFILES_DIR }}
DBT_PROFILES_DIR: ${{ vars.DBT_PROFILES_DIR }}
ACCOUNT: "${{ secrets.ACCOUNT }}"
ROLE: "${{ secrets.ROLE }}"
USER: "${{ secrets.USER }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ secrets.REGION }}"
DATABASE: "${{ secrets.DATABASE }}"
WAREHOUSE: "${{ secrets.WAREHOUSE }}"
SCHEMA: "${{ secrets.SCHEMA }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
@ -37,7 +37,7 @@ jobs:
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ secrets.DBT_VERSION }} cli_passthrough requests click
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |

View File

@ -1,45 +0,0 @@
name: dbt_run_scheduled
run-name: dbt_run_scheduled
on:
workflow_dispatch:
schedule:
# Runs "at 08:00 UTC on Monday and Wednesday" (see https://crontab.guru)
# Update schedule for additional models, where applicable
- cron: '0 8 * * 1,3'
env:
DBT_PROFILES_DIR: ${{ secrets.DBT_PROFILES_DIR }}
ACCOUNT: "${{ secrets.ACCOUNT }}"
ROLE: "${{ secrets.ROLE }}"
USER: "${{ secrets.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ secrets.REGION }}"
DATABASE: "${{ secrets.DATABASE }}"
WAREHOUSE: "${{ secrets.WAREHOUSE }}"
SCHEMA: "${{ secrets.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v1
with:
python-version: "3.7.x"
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ secrets.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |
dbt run

44
.github/workflows/dbt_run_weekly.yml vendored Normal file
View File

@ -0,0 +1,44 @@
# GitHub Actions workflow: weekly full dbt run, excluding the DefiLlama
# models (those run on their own daily schedule).
name: dbt_run_scheduled_weekly
run-name: dbt_run_scheduled_weekly
on:
  workflow_dispatch:
  schedule:
    # Runs "at 08:00 UTC on Mondays" (see https://crontab.guru)
    - cron: '0 8 * * 1'
env:
  # Snowflake connection settings: non-sensitive values come from repository
  # variables (vars), only the password from secrets.
  DBT_PROFILES_DIR: ${{ vars.DBT_PROFILES_DIR }}
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
  # At most one run of this workflow at a time.
  group: ${{ github.workflow }}
jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v1
        with:
          python-version: "3.7.x"
      - name: install dependencies
        run: |
          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run --exclude tag:defillama

View File

@ -5,21 +5,21 @@ on:
workflow_dispatch:
branches:
- "main"
# schedule:
# # Runs "at 8:00 UTC" (see https://crontab.guru)
# - cron: '0 8 * * *'
schedule:
# Runs "at 9:00 UTC" (see https://crontab.guru)
- cron: '0 9 * * *'
env:
DBT_PROFILES_DIR: ${{ secrets.DBT_PROFILES_DIR }}
DBT_PROFILES_DIR: ${{ vars.DBT_PROFILES_DIR }}
ACCOUNT: "${{ secrets.ACCOUNT }}"
ROLE: "${{ secrets.ROLE }}"
USER: "${{ secrets.USER }}"
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ secrets.REGION }}"
DATABASE: "${{ secrets.DATABASE }}"
WAREHOUSE: "${{ secrets.WAREHOUSE }}"
SCHEMA: "${{ secrets.SCHEMA }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
@ -39,7 +39,7 @@ jobs:
- name: install dependencies
run: |
pip3 install dbt-snowflake==${{ secrets.DBT_VERSION }} cli_passthrough requests click
pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
dbt deps
- name: Run DBT Jobs
run: |

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,36 @@
-- Bronze model: bridge dimension rows from the DefiLlama bridges API.
-- Incremental: only bridge_ids not already present in the target table are
-- inserted on subsequent runs (existing rows are never re-fetched).
{{ config(
    materialized = 'incremental',
    unique_key = 'bridge_id',
    tags = ['defillama']
) }}

WITH bridge_base AS (
    -- Single API call; the whole JSON response lands in `read`.
    SELECT
        ethereum.streamline.udf_api(
            'GET','https://bridges.llama.fi/bridges?includeChains=true',{},{}
        ) AS read,
        SYSDATE() AS _inserted_timestamp
)

SELECT
    VALUE:id::STRING AS bridge_id,
    VALUE:name::STRING AS bridge,
    VALUE:chains AS chains,
    -- API encodes "no destination chain" as 'false' or '-'; normalize to NULL.
    CASE
        WHEN VALUE:destinationChain::STRING ilike 'false' OR VALUE:destinationChain::STRING = '-' THEN NULL
        ELSE VALUE:destinationChain::STRING
    END AS destination_chain,
    -- row_num is used downstream (silver bridge-volume model) to batch
    -- per-bridge API calls.
    ROW_NUMBER() OVER (ORDER BY bridge) AS row_num,
    _inserted_timestamp
FROM bridge_base,
    LATERAL FLATTEN (input=> read:data:bridges)

{% if is_incremental() %}
WHERE bridge_id NOT IN (
    SELECT
        DISTINCT bridge_id
    FROM
        {{ this }}
)
{% endif %}

View File

@ -0,0 +1,23 @@
# dbt tests for the bridges dimension model: BRIDGE_ID unique, key columns
# non-null, _INSERTED_TIMESTAMP typed TIMESTAMP_NTZ.
# NOTE(review): model is named silver__defillama_bridges here, but code
# elsewhere refs bronze__defillama_bridges — confirm the model file name.
version: 2
models:
  - name: silver__defillama_bridges
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - BRIDGE_ID
    columns:
      - name: BRIDGE_ID
        tests:
          - not_null
      - name: BRIDGE
        tests:
          - not_null
      - name: CHAINS
      - name: DESTINATION_CHAIN
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ

View File

@ -0,0 +1,32 @@
-- Bronze model: chain dimension rows from the DefiLlama chains API.
-- Incremental: only chains not already present are inserted.
{{ config(
    materialized = 'incremental',
    unique_key = 'chain',
    tags = ['defillama']
) }}

WITH chain_base AS (
    -- Single API call; the whole JSON response lands in `read`.
    SELECT
        ethereum.streamline.udf_api(
            'GET','https://api.llama.fi/chains',{},{}
        ) AS read,
        SYSDATE() AS _inserted_timestamp
)

SELECT
    VALUE:chainId::STRING AS chain_id,
    VALUE:name::STRING AS chain,
    VALUE:tokenSymbol::STRING AS token_symbol,
    -- row_num is used downstream (silver chains-TVL model) to batch
    -- per-chain API calls.
    ROW_NUMBER() OVER (ORDER BY chain) AS row_num,
    _inserted_timestamp
FROM chain_base,
    LATERAL FLATTEN (input=> read:data)

{% if is_incremental() %}
WHERE chain NOT IN (
    SELECT
        DISTINCT chain
    FROM
        {{ this }}
)
{% endif %}

View File

@ -0,0 +1,20 @@
# dbt tests for the chains dimension model: CHAIN unique and non-null.
# NOTE(review): model is named silver__defillama_chains here, but code
# elsewhere refs bronze__defillama_chains — confirm the model file name.
version: 2
models:
  - name: silver__defillama_chains
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - CHAIN
    columns:
      - name: CHAIN_ID
      - name: CHAIN
        tests:
          - not_null
      - name: TOKEN_SYMBOL
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ

View File

@ -0,0 +1,52 @@
-- Bronze model: DEX + options protocol dimension rows from the DefiLlama
-- overview endpoints. The two halves are combined with UNION (not UNION
-- ALL), which also de-duplicates rows identical across the two reads.
-- Incremental: only dex_ids not already present are inserted.
{{ config(
    materialized = 'incremental',
    unique_key = 'dex_id',
    tags = ['defillama']
) }}

WITH base AS (
    -- Two API calls: one for DEX volume protocols, one for options protocols.
    SELECT
        ethereum.streamline.udf_api(
            'GET','https://api.llama.fi/overview/dexs?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=true&dataType=totalVolume',{},{}
        ) AS dex_read,
        ethereum.streamline.udf_api(
            'GET','https://api.llama.fi/overview/options?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=true&dataType=totalPremiumVolume',{},{}
        ) AS options_read,
        SYSDATE() AS _inserted_timestamp
)

-- DEX protocols.
SELECT
    VALUE:module::STRING AS dex_id,
    VALUE:name::STRING AS dex,
    VALUE:category::STRING AS category,
    VALUE:chains AS chains,
    _inserted_timestamp
FROM base,
    LATERAL FLATTEN (input=> dex_read:data:protocols)
{% if is_incremental() %}
WHERE dex_id NOT IN (
    SELECT
        DISTINCT dex_id
    FROM
        {{ this }}
)
{% endif %}

UNION

-- Options protocols.
SELECT
    VALUE:module::STRING AS dex_id,
    VALUE:name::STRING AS dex,
    VALUE:category::STRING AS category,
    VALUE:chains AS chains,
    _inserted_timestamp
FROM base,
    LATERAL FLATTEN (input=> options_read:data:protocols)
{% if is_incremental() %}
WHERE dex_id NOT IN (
    SELECT
        DISTINCT dex_id
    FROM
        {{ this }}
)
{% endif %}

View File

@ -0,0 +1,23 @@
# dbt tests for the dexes dimension model: DEX_ID unique and non-null.
# NOTE(review): model is named silver__defillama_dexes here, but code
# elsewhere refs bronze__defillama_dexes — confirm the model file name.
version: 2
models:
  - name: silver__defillama_dexes
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            # Uppercased (was `dex_id`) for consistency with every sibling
            # schema file; Snowflake identifiers are case-insensitive here.
            - DEX_ID
    columns:
      - name: DEX_ID
        tests:
          - not_null
      - name: DEX
        tests:
          - not_null
      - name: CATEGORY
      - name: CHAINS
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ

View File

@ -0,0 +1,45 @@
-- Bronze model: protocol dimension rows from the DefiLlama protocols API.
-- Incremental: only protocol_ids not already present are inserted.
{{ config(
    materialized = 'incremental',
    unique_key = 'protocol_id',
    tags = ['defillama']
) }}

WITH protocol_base AS (
    -- Single API call; the whole JSON response lands in `read`.
    SELECT
        ethereum.streamline.udf_api(
            'GET','https://api.llama.fi/protocols',{},{}
        ) AS read,
        SYSDATE() AS _inserted_timestamp
)

SELECT
    VALUE:id::STRING AS protocol_id,
    VALUE:slug::STRING AS protocol_slug,
    VALUE:name::STRING AS protocol,
    -- '-' means "no address". Addresses may carry a "<chain>:" prefix;
    -- CHARINDEX finds the first ':' and SUBSTRING keeps what follows
    -- (CHARINDEX returns 0 when there is no ':', so the whole string is kept).
    CASE
        WHEN VALUE:address::STRING = '-' THEN NULL
        ELSE SUBSTRING(LOWER(VALUE:address::STRING), CHARINDEX(':', LOWER(VALUE:address::STRING))+1)
    END AS address,
    -- '-' also means "no symbol".
    CASE
        WHEN VALUE:symbol::STRING = '-' THEN NULL
        ELSE VALUE:symbol::STRING
    END AS symbol,
    VALUE:description::STRING AS description,
    VALUE:chain::STRING AS chain,
    VALUE:audits::INTEGER AS num_audits,
    VALUE:audit_note::STRING AS audit_note,
    VALUE:category::STRING AS category,
    VALUE:chains AS chains,
    _inserted_timestamp
FROM protocol_base,
    LATERAL FLATTEN (input=> read:data)

{% if is_incremental() %}
WHERE protocol_id NOT IN (
    SELECT
        DISTINCT protocol_id
    FROM
        {{ this }}
)
{% endif %}

View File

@ -0,0 +1,30 @@
# dbt tests for the protocols dimension model: PROTOCOL_ID unique and
# non-null.
# NOTE(review): model is named silver__defillama_protocols here, but code
# elsewhere refs bronze__defillama_protocols — confirm the model file name.
version: 2
models:
  - name: silver__defillama_protocols
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - PROTOCOL_ID
    columns:
      - name: PROTOCOL_ID
        tests:
          - not_null
      - name: PROTOCOL_SLUG
        tests:
          - not_null
      - name: PROTOCOL
      - name: ADDRESS
      - name: SYMBOL
      - name: DESCRIPTION
      - name: CHAIN
      - name: NUM_AUDITS
      - name: AUDIT_NOTE
      - name: CATEGORY
      - name: CHAINS
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ

View File

@ -0,0 +1,36 @@
-- Bronze model: stablecoin dimension rows from the DefiLlama stablecoins
-- API. Incremental: only stablecoin_ids not already present are inserted.
{{ config(
    materialized = 'incremental',
    unique_key = 'stablecoin_id',
    tags = ['defillama']
) }}

WITH stablecoin_base AS (
    -- Single API call; the whole JSON response lands in `read`.
    SELECT
        ethereum.streamline.udf_api(
            'GET','https://stablecoins.llama.fi/stablecoins?includePrices=false',{},{}
        ) AS read,
        SYSDATE() AS _inserted_timestamp
)

SELECT
    VALUE:id::STRING AS stablecoin_id,
    VALUE:name::STRING AS stablecoin,
    VALUE:symbol::STRING AS symbol,
    VALUE:pegType::STRING AS peg_type,
    VALUE:pegMechanism::STRING AS peg_mechanism,
    VALUE:priceSource::STRING AS price_source,
    VALUE:chains AS chains,
    -- row_num mirrors the other bronze models (batching helper).
    ROW_NUMBER() OVER (ORDER BY stablecoin) AS row_num,
    _inserted_timestamp
FROM stablecoin_base,
    LATERAL FLATTEN (input=> read:data:peggedAssets)

{% if is_incremental() %}
WHERE stablecoin_id NOT IN (
    SELECT
        DISTINCT stablecoin_id
    FROM
        {{ this }}
)
{% endif %}

View File

@ -0,0 +1,26 @@
# dbt tests for the stablecoins dimension model: STABLECOIN_ID unique and
# non-null.
# NOTE(review): model is named silver__defillama_stablecoins here, but code
# elsewhere refs bronze__defillama_stablecoins — confirm the model file name.
version: 2
models:
  - name: silver__defillama_stablecoins
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - STABLECOIN_ID
    columns:
      - name: STABLECOIN_ID
        tests:
          - not_null
      - name: STABLECOIN
        tests:
          - not_null
      - name: SYMBOL
      - name: PEG_TYPE
      - name: PEG_MECHANISM
      - name: PRICE_SOURCE
      - name: CHAINS
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ

View File

@ -0,0 +1,13 @@
-- Gold dimension view: one row per DefiLlama bridge (id, name, chains,
-- primary destination chain). Docs persisted to the warehouse.
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    bridge_id,
    bridge,
    chains,
    destination_chain
FROM {{ ref('bronze__defillama_bridges') }}

View File

@ -0,0 +1,14 @@
# Column documentation for defillama__dim_bridges (persisted via persist_docs).
version: 2
models:
  - name: defillama__dim_bridges
    description: This table contains dimensional information about the bridges listed on Defillama.
    columns:
      - name: BRIDGE_ID
        description: Unique identifier of the bridge.
      - name: BRIDGE
        description: Name of the bridge.
      - name: CHAINS
        description: Array of the various chains or networks that the bridge interacts with.
      - name: DESTINATION_CHAIN
        description: The chain that serves as the primary destination for the bridge.

View File

@ -0,0 +1,12 @@
-- Gold dimension view: one row per DefiLlama chain (id, name, token symbol).
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    chain_id,
    chain,
    token_symbol
FROM {{ ref('bronze__defillama_chains') }}

View File

@ -0,0 +1,12 @@
# Column documentation for defillama__dim_chains (persisted via persist_docs).
version: 2
models:
  - name: defillama__dim_chains
    description: This table contains dimensional information about the blockchains and networks listed on Defillama.
    columns:
      - name: CHAIN_ID
        description: Unique identifier of the chain.
      - name: CHAIN
        description: Name of the blockchain.
      - name: TOKEN_SYMBOL
        description: Symbol for the primary token of the chain.

View File

@ -0,0 +1,13 @@
-- Gold dimension view: one row per DefiLlama DEX/options protocol
-- (id, name, category, chains).
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    dex_id,
    dex,
    category,
    chains
FROM {{ ref('bronze__defillama_dexes') }}

View File

@ -0,0 +1,14 @@
# Column documentation for defillama__dim_dexes (persisted via persist_docs).
version: 2
models:
  - name: defillama__dim_dexes
    description: This table contains dimensional information about the decentralized exchanges (DEX) listed on Defillama.
    columns:
      # Uppercased (was `dex_id`) for consistency with the sibling docs files.
      - name: DEX_ID
        description: Unique identifier of the dex.
      - name: DEX
        description: Name of the dex.
      - name: CATEGORY
        # Fixed typo: "exhange" -> "exchange".
        description: The category of protocol the decentralized exchange belongs to (e.g. Dexes, Options, Yield, Derivatives etc.).
      - name: CHAINS
        description: Array of the various chains or networks that the dex is deployed on.

View File

@ -0,0 +1,26 @@
-- Gold dimension view: one row per DefiLlama yield pool. Sourced from a
-- point-in-time API snapshot table (suffix 20230209_131432) — presumably a
-- one-off load; confirm whether it is refreshed.
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    pool AS pool_id,
    project AS protocol,
    symbol,
    chain,
    -- 'none' means no reward tokens; otherwise lowercase the address list.
    CASE
        WHEN rewardtokens ILIKE 'none' THEN NULL
        ELSE LOWER(rewardtokens)
    END AS reward_tokens,
    -- Same normalization for the underlying token list.
    CASE
        WHEN underlyingtokens ILIKE 'none' THEN NULL
        ELSE LOWER(underlyingtokens)
    END AS underlying_tokens,
    stablecoin AS is_stablecoin,
    ilrisk,
    exposure AS exposure_type,
    poolmeta AS pool_metadata
FROM
    {{ ref('bronze__defillama_api_pools_20230209_131432') }}

View File

@ -0,0 +1,26 @@
# Column documentation for defillama__dim_pools (persisted via persist_docs).
version: 2
models:
  - name: defillama__dim_pools
    description: This table contains dimensional information about DeFi pools listed on DefiLlama.
    columns:
      - name: POOL_ID
        description: The unique identifier for each pool.
      - name: PROTOCOL
        description: The protocol the pool is part of.
      - name: SYMBOL
        description: The symbol of the pool.
      - name: CHAIN
        description: The chain the pool is located on.
      - name: REWARD_TOKENS
        description: The addresses of the tokens rewarded by the pool.
      - name: UNDERLYING_TOKENS
        description: The underlying tokens that make up the pool.
      - name: IS_STABLECOIN
        description: Boolean column displaying whether the pool contains a stablecoin or not.
      - name: ILRISK
        # Corrected: DefiLlama's yields API `ilRisk` field flags impermanent
        # loss risk, not illiquidity.
        description: Column displaying whether the pool carries impermanent loss (IL) risk.
      - name: EXPOSURE_TYPE
        description: The type of exposure of the pool.
      - name: POOL_METADATA
        description: Metadata of the pool.

View File

@ -0,0 +1,20 @@
-- Gold dimension view: one row per DefiLlama protocol with identity,
-- chain coverage and audit metadata.
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    protocol_id,
    protocol_slug,
    protocol,
    address,
    symbol,
    description,
    chain,
    chains,
    category,
    num_audits,
    audit_note
FROM {{ ref('bronze__defillama_protocols') }}

View File

@ -0,0 +1,28 @@
# Column documentation for defillama__dim_protocols (persisted via persist_docs).
version: 2
models:
  - name: defillama__dim_protocols
    description: This table contains dimensional information about the protocols listed on Defillama.
    columns:
      - name: PROTOCOL_ID
        description: Unique identifier for the protocol.
      - name: PROTOCOL_SLUG
        description: Slug for the protocol.
      - name: PROTOCOL
        description: Name of the protocol.
      - name: ADDRESS
        description: Contract address of the protocol.
      - name: SYMBOL
        description: Symbol for the protocol.
      - name: DESCRIPTION
        description: Description of the protocol.
      - name: CHAIN
        description: Chain the protocol is operating on.
      - name: CHAINS
        description: Chains the protocol is available on.
      - name: CATEGORY
        description: Category the protocol belongs to.
      - name: NUM_AUDITS
        description: Number of audits the protocol has undergone.
      - name: AUDIT_NOTE
        description: Notes on the audits of the protocol.

View File

@ -0,0 +1,16 @@
-- Gold dimension view: one row per DefiLlama stablecoin (peg metadata and
-- chain coverage).
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    stablecoin_id,
    stablecoin,
    symbol,
    peg_type,
    peg_mechanism,
    price_source,
    chains
FROM {{ ref('bronze__defillama_stablecoins') }}

View File

@ -0,0 +1,20 @@
# Column documentation for defillama__dim_stablecoins (persisted via persist_docs).
version: 2
models:
  - name: defillama__dim_stablecoins
    description: This table contains dimensional information about the stablecoins listed on Defillama.
    columns:
      - name: STABLECOIN_ID
        description: Unique identifier of the stablecoin.
      - name: STABLECOIN
        description: Name of the stablecoin.
      - name: SYMBOL
        description: Symbol of the stablecoin.
      - name: PEG_TYPE
        description: Type of asset that the stablecoin is pegged to.
      - name: PEG_MECHANISM
        description: The method used for the stablecoin peg (e.g. algorithmic, crypto-backed etc.).
      - name: PRICE_SOURCE
        description: The source of the prices listed on Defillama.
      - name: CHAINS
        description: Array of the various chains or networks that the stablecoin is deployed on.

View File

@ -0,0 +1,17 @@
-- Gold fact view: daily deposit/withdraw transaction counts and USD volume
-- per bridge, one row per (date, bridge).
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    TIMESTAMP :: DATE AS DATE,
    bridge_id,
    bridge,
    deposit_txs,
    deposit_usd,
    withdraw_txs,
    withdraw_usd
FROM
    {{ ref('silver__defillama_bridge_volume') }}

View File

@ -0,0 +1,20 @@
# Column documentation for defillama__fact_bridge_volume (persisted via persist_docs).
version: 2
models:
  - name: defillama__fact_bridge_volume
    description: This table contains historical deposit and withdraw transaction volume for the bridges listed on Defillama in `dim_bridges`.
    columns:
      - name: DATE
        description: Date associated with the reported records.
      - name: BRIDGE_ID
        description: Unique identifier of the bridge.
      - name: BRIDGE
        description: Name of the bridge.
      - name: DEPOSIT_TXS
        description: Total number of deposit transactions to the bridge.
      - name: DEPOSIT_USD
        description: Total value of deposits to the bridge, denominated in USD.
      - name: WITHDRAW_TXS
        description: Total number of withdrawal transactions to the bridge.
      - name: WITHDRAW_USD
        description: Total value of withdrawals to the bridge, denominated in USD.

View File

@ -0,0 +1,14 @@
-- Gold fact view: daily TVL per chain, one row per (date, chain).
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    TIMESTAMP :: DATE AS DATE,
    -- Fixed: was `chain,id,` — a typo that selected `chain` twice plus the
    -- silver model's surrogate `id`. The documented column is CHAIN_ID.
    chain_id,
    chain,
    tvl_usd
FROM
    {{ ref('silver__defillama_chains_tvl') }}

View File

@ -0,0 +1,14 @@
# Column documentation for defillama__fact_chain_tvl (persisted via persist_docs).
version: 2
models:
  - name: defillama__fact_chain_tvl
    description: This table contains historical TVL (Total Value Locked) for the chains listed on Defillama in `dim_chains`.
    columns:
      - name: DATE
        description: Date associated with the reported TVL.
      - name: CHAIN_ID
        description: Unique identifier of the blockchain.
      - name: CHAIN
        description: Name of the blockchain.
      - name: TVL_USD
        description: Total Value Locked on the blockchain denominated in USD.

View File

@ -0,0 +1,14 @@
-- Gold fact view: daily DEX trading volume per (date, chain, protocol).
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    TIMESTAMP :: DATE AS DATE,
    chain,
    protocol,
    daily_volume AS volume
FROM
    -- Dropped the stray, unused `f` table alias for consistency with the
    -- other single-source fact views.
    {{ ref('silver__defillama_dex_volume') }}

View File

@ -0,0 +1,14 @@
# Column documentation for defillama__fact_dex_volume (persisted via persist_docs).
version: 2
models:
  - name: defillama__fact_dex_volume
    description: This table contains historical decentralized exchange volumes for the protocols listed on Defillama in `dim_protocols`, where available.
    columns:
      - name: DATE
        description: Date associated with the reported records.
      - name: CHAIN
        description: The name of the blockchain.
      - name: PROTOCOL
        description: The name of the protocol.
      - name: VOLUME
        description: The total volume driven by the protocol on a daily basis, denominated in USD.

View File

@ -0,0 +1,18 @@
-- Gold fact view: daily options volume per (date, chain, protocol).
-- Premium volume drives the row set; notional volume is LEFT JOINed in and
-- both measures default to 0 when missing.
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    p.TIMESTAMP :: DATE AS DATE,
    p.chain,
    p.protocol,
    COALESCE(daily_volume_premium,0) AS volume_premium,
    COALESCE(daily_volume_notional,0) AS volume_notional
FROM
    {{ ref('silver__defillama_options_premium') }} p
    LEFT JOIN
    {{ ref('silver__defillama_options_notional') }} n
    ON p.TIMESTAMP = n.TIMESTAMP AND p.chain = n.chain AND p.protocol = n.protocol

View File

@ -0,0 +1,16 @@
# Column documentation for defillama__fact_options_volume (persisted via persist_docs).
version: 2
models:
  - name: defillama__fact_options_volume
    description: This table contains historical options volumes for the protocols listed on Defillama in `dim_protocols`, where available.
    columns:
      - name: DATE
        description: Date associated with the reported records.
      - name: CHAIN
        description: The name of the blockchain.
      - name: PROTOCOL
        description: The name of the protocol.
      - name: VOLUME_PREMIUM
        description: The total premium volume driven by the protocol on a daily basis, denominated in USD.
      - name: VOLUME_NOTIONAL
        description: The total notional volume driven by the protocol on a daily basis, denominated in USD.

View File

@ -0,0 +1,18 @@
-- Gold fact view: daily fees and revenue per (date, chain, protocol).
-- Fees drive the row set; revenue is LEFT JOINed in and both measures
-- default to 0 when missing.
{{ config(
    materialized = 'view',
    persist_docs ={ "relation": true,
    "columns": true },
    tags = ['defillama']
) }}

SELECT
    f.TIMESTAMP :: DATE AS DATE,
    f.chain,
    f.protocol,
    COALESCE(daily_fees,0) AS fees,
    COALESCE(daily_rev,0) AS revenue
FROM
    {{ ref('silver__defillama_protocol_fees') }} f
    LEFT JOIN
    {{ ref('silver__defillama_protocol_revenue') }} r
    ON f.TIMESTAMP = r.TIMESTAMP AND f.chain = r.chain AND f.protocol = r.protocol

View File

@ -0,0 +1,16 @@
# Column documentation for defillama__fact_protocol_fees_revenue (persisted via persist_docs).
version: 2
models:
  - name: defillama__fact_protocol_fees_revenue
    description: This table contains historical fees and revenue for the protocols listed on Defillama in `dim_protocols`, where available.
    columns:
      - name: DATE
        description: Date associated with the reported records.
      - name: CHAIN
        description: The name of the blockchain.
      - name: PROTOCOL
        description: The name of the protocol.
      - name: FEES
        description: The total amount of fees earned by the protocol on a daily basis, denominated in USD.
      - name: REVENUE
        description: The total amount of revenue earned by the protocol on a daily basis, denominated in USD.

View File

@ -0,0 +1,58 @@
-- Silver model: full daily deposit/withdraw history per bridge from the
-- DefiLlama bridgevolume endpoint. One API call per bridge, unrolled by
-- Jinja into 5 UNIONed batches of 10 bridges each (row_num 1-50).
-- NOTE(review): bridges with row_num > 50 are never fetched — confirm
-- intended. full_refresh = false protects accumulated history from
-- `--full-refresh` wiping the table.
{{ config(
    materialized = 'incremental',
    unique_key = 'id',
    full_refresh = false,
    tags = ['defillama']
) }}

WITH bridge_base AS (
    {% for item in range(5) %}
    (
        SELECT
            bridge_id,
            bridge,
            ethereum.streamline.udf_api(
                'GET',CONCAT('https://bridges.llama.fi/bridgevolume/all?id=',bridge_id),{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
        FROM (
            SELECT
                bridge_id,
                bridge,
                row_num
            FROM {{ ref('bronze__defillama_bridges') }}
            -- Batch: rows (item*10 + 1) .. (item+1)*10.
            WHERE row_num BETWEEN {{ item * 10 + 1 }} AND {{ (item + 1) * 10}}
        )
        {% if is_incremental() %}
        -- Skip bridges whose stored history already reaches today.
        WHERE bridge_id NOT IN (
            SELECT
                bridge_id
            FROM (
                SELECT
                    DISTINCT bridge_id,
                    MAX(timestamp::DATE) AS max_timestamp
                FROM {{ this }}
                GROUP BY 1
                HAVING CURRENT_DATE = max_timestamp
            )
        )
        {% endif %}
    ) {% if not loop.last %}
    UNION ALL
    {% endif %}
    {% endfor %}
)

SELECT
    bridge_id,
    bridge,
    -- API returns unix epoch seconds.
    TO_TIMESTAMP(VALUE:date::INTEGER) AS timestamp,
    VALUE:depositTxs::INTEGER AS deposit_txs,
    VALUE:depositUSD::INTEGER AS deposit_usd,
    VALUE:withdrawTxs::INTEGER AS withdraw_txs,
    VALUE:withdrawUSD::INTEGER AS withdraw_usd,
    _inserted_timestamp,
    -- unique_key for the incremental merge.
    CONCAT(bridge_id,'-',bridge,'-',timestamp) AS id
FROM bridge_base,
    LATERAL FLATTEN (input=> read:data)

View File

@ -0,0 +1,29 @@
# dbt tests for silver__defillama_bridge_volume: (BRIDGE_ID, TIMESTAMP)
# unique, key columns and surrogate ID non-null.
version: 2
models:
  - name: silver__defillama_bridge_volume
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - BRIDGE_ID
            - TIMESTAMP
    columns:
      - name: BRIDGE_ID
        tests:
          - not_null
      - name: BRIDGE
        tests:
          - not_null
      - name: DEPOSIT_TXS
      - name: DEPOSIT_USD
      - name: WITHDRAW_TXS
      - name: WITHDRAW_USD
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
      - name: ID
        tests:
          - not_null

View File

@ -0,0 +1,57 @@
-- Silver model: full daily TVL history per chain from the DefiLlama charts
-- endpoint. One API call per chain, unrolled by Jinja into 5 UNIONed
-- batches of 60 chains each (row_num 1-300). full_refresh = false protects
-- accumulated history from `--full-refresh`.
{{ config(
    materialized = 'incremental',
    unique_key = 'id',
    full_refresh = false,
    tags = ['defillama']
) }}

WITH tvl_base AS (
    {% for item in range(5) %}
    (
        SELECT
            chain_id,
            chain,
            ethereum.streamline.udf_api(
                'GET',CONCAT('https://api.llama.fi/charts/',chain),{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
        FROM (
            SELECT
                DISTINCT chain,
                chain_id,
                row_num
            FROM {{ ref('bronze__defillama_chains') }}
            -- Batch: rows (item*60 + 1) .. (item+1)*60.
            WHERE row_num BETWEEN {{ item * 60 + 1 }} AND {{ (item + 1) * 60 }}
        )
        {% if is_incremental() %}
        -- Skip chains whose stored history already reaches today.
        WHERE chain NOT IN (
            SELECT
                chain
            FROM (
                SELECT
                    DISTINCT chain,
                    MAX(timestamp::DATE) AS max_timestamp
                FROM {{ this }}
                GROUP BY 1
                HAVING CURRENT_DATE = max_timestamp
            )
        )
        {% endif %}
    ) {% if not loop.last %}
    UNION ALL
    {% endif %}
    {% endfor %}
)

SELECT
    chain_id,
    chain,
    -- API returns unix epoch seconds.
    TO_TIMESTAMP(VALUE:date::INTEGER) AS timestamp,
    VALUE:totalLiquidityUSD::INTEGER AS tvl_usd,
    _inserted_timestamp,
    -- Surrogate unique_key for the incremental merge.
    {{ dbt_utils.surrogate_key(
        ['chain_id', 'chain', 'timestamp']
    ) }} AS id
FROM tvl_base,
    LATERAL FLATTEN (input=> read:data)

View File

@ -0,0 +1,25 @@
# dbt tests for silver__defillama_chains_tvl: (CHAIN, TIMESTAMP) unique,
# CHAIN and surrogate ID non-null.
version: 2
models:
  - name: silver__defillama_chains_tvl
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - CHAIN
            - TIMESTAMP
    columns:
      - name: CHAIN_ID
      - name: CHAIN
        tests:
          - not_null
      - name: TIMESTAMP
      - name: TVL_USD
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
      - name: ID
        tests:
          - not_null

View File

@ -0,0 +1,83 @@
-- Silver model: daily DEX volume per (chain, protocol). First fetches the
-- list of chains (allChains), then one per-chain overview call, unrolled by
-- Jinja into 5 UNIONed batches of 20 chains each (row_num 1-100). Each
-- day's totalDataChartBreakdown entry is a {protocol: volume} object that
-- the final FLATTEN explodes into one row per protocol.
-- full_refresh = false protects accumulated history from `--full-refresh`.
{{ config(
    materialized = 'incremental',
    unique_key = 'id',
    full_refresh = false,
    tags = ['defillama']
) }}

WITH all_chains_dex_base AS (
    -- Chain list from the aggregate overview endpoint.
    SELECT
        LOWER(VALUE::STRING) AS chain,
        ROW_NUMBER() OVER (ORDER BY chain) AS row_num,
        _inserted_timestamp
    FROM (
        SELECT
            ethereum.streamline.udf_api(
                'GET','https://api.llama.fi/overview/dexs?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=true&dataType=dailyVolume',{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
    ),
    LATERAL FLATTEN (input=> read:data:allChains)
),
dex_base AS (
    {% for item in range(5) %}
    (
        SELECT
            chain,
            ethereum.streamline.udf_api(
                'GET',CONCAT('https://api.llama.fi/overview/dexs/',chain,'?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=false&dataType=dailyVolume'),{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
        FROM (
            SELECT
                DISTINCT chain,
                row_num
            FROM all_chains_dex_base
            -- Batch: rows (item*20 + 1) .. (item+1)*20.
            WHERE row_num BETWEEN {{ item * 20 + 1 }} AND {{ (item + 1) * 20 }}
        )
        {% if is_incremental() %}
        -- Skip chains whose stored history already reaches today.
        WHERE chain NOT IN (
            SELECT
                chain
            FROM (
                SELECT
                    DISTINCT chain,
                    MAX(timestamp::DATE) AS max_timestamp
                FROM {{ this }}
                GROUP BY 1
                HAVING CURRENT_DATE = max_timestamp
            )
        )
        {% endif %}
    ) {% if not loop.last %}
    UNION ALL
    {% endif %}
    {% endfor %}
),
reads_output AS (
    -- Each breakdown entry is [epoch_seconds, {protocol: volume, ...}].
    SELECT
        chain,
        TO_TIMESTAMP(VALUE[0]::INTEGER) AS timestamp,
        VALUE[1] AS dex_object,
        _inserted_timestamp
    FROM dex_base,
        LATERAL FLATTEN (input=> read:data:totalDataChartBreakdown)
)

SELECT
    chain,
    timestamp,
    key::STRING AS protocol,
    value::INTEGER AS daily_volume,
    dex_object,
    _inserted_timestamp,
    -- Surrogate unique_key for the incremental merge.
    {{ dbt_utils.surrogate_key(
        ['chain', 'protocol', 'timestamp']
    ) }} AS id
FROM reads_output,
    LATERAL FLATTEN(input => PARSE_JSON(reads_output.dex_object))

View File

@ -0,0 +1,25 @@
version: 2
models:
- name: silver__defillama_dex_volume
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- ID
columns:
- name: CHAIN
tests:
- not_null
- name: TIMESTAMP
- name: PROTOCOL
- name: DAILY_VOLUME
- name: DEX_OBJECT
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- name: ID
tests:
- not_null

View File

@ -0,0 +1,83 @@
{{ config(
materialized = 'incremental',
unique_key = 'id',
full_refresh = false,
tags = ['defillama']
) }}
WITH all_chains_options_base AS (
SELECT
LOWER(VALUE::STRING) AS chain,
ROW_NUMBER() OVER (ORDER BY chain) AS row_num,
_inserted_timestamp
FROM (
SELECT
ethereum.streamline.udf_api(
'GET','https://api.llama.fi/overview/options?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=true&dataType=dailyNotionalVolume',{},{}
) AS read,
SYSDATE() AS _inserted_timestamp
),
LATERAL FLATTEN (input=> read:data:allChains)
),
options_base AS (
{% for item in range(5) %}
(
SELECT
chain,
ethereum.streamline.udf_api(
'GET',CONCAT('https://api.llama.fi/overview/options/',chain,'?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=false&dataType=dailyNotionalVolume'),{},{}
) AS read,
SYSDATE() AS _inserted_timestamp
FROM (
SELECT
DISTINCT chain,
row_num
FROM all_chains_options_base
WHERE row_num BETWEEN {{ item * 5 + 1 }} AND {{ (item + 1) * 5 }}
)
{% if is_incremental() %}
WHERE chain NOT IN (
SELECT
chain
FROM (
SELECT
DISTINCT chain,
MAX(timestamp::DATE) AS max_timestamp
FROM {{ this }}
GROUP BY 1
HAVING CURRENT_DATE = max_timestamp
)
)
{% endif %}
) {% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}
),
reads_output AS (
SELECT
chain,
TO_TIMESTAMP(VALUE[0]::INTEGER) AS timestamp,
VALUE[1] AS options_object,
_inserted_timestamp
FROM options_base,
LATERAL FLATTEN (input=> read:data:totalDataChartBreakdown)
)
SELECT
chain,
timestamp,
key::STRING AS protocol,
value::INTEGER AS daily_volume_notional,
options_object,
_inserted_timestamp,
{{ dbt_utils.surrogate_key(
['chain', 'protocol', 'timestamp']
) }} AS id
FROM reads_output,
LATERAL FLATTEN(input => PARSE_JSON(reads_output.options_object))

View File

@ -0,0 +1,25 @@
version: 2
# dbt tests and column listing for the DefiLlama daily notional options
# volume model.
models:
  - name: silver__defillama_options_notional
    tests:
      # single-column combination: effectively enforces uniqueness of ID
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - ID
    columns:
      - name: CHAIN
        tests:
          - not_null
      - name: TIMESTAMP
      - name: PROTOCOL
      - name: DAILY_VOLUME_NOTIONAL
      - name: OPTIONS_OBJECT
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          # load timestamp must be stored as TIMESTAMP_NTZ
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
      - name: ID
        tests:
          - not_null

View File

@ -0,0 +1,83 @@
-- Incremental dbt model: daily PREMIUM options volume per chain/protocol,
-- pulled live from the DefiLlama API. Structure mirrors the notional-volume
-- model but requests dataType=dailyPremiumVolume. full_refresh is disabled so
-- fetched history is never dropped; rows merge on surrogate `id`.
{{ config(
    materialized = 'incremental',
    unique_key = 'id',
    full_refresh = false,
    tags = ['defillama']
) }}
-- 1) Fetch the list of chains with options activity and number them so the
--    per-chain calls below can be issued in small batches.
WITH all_chains_options_base AS (
    SELECT
        LOWER(VALUE::STRING) AS chain,
        -- row_num drives the batching in options_base (5 chains per batch)
        ROW_NUMBER() OVER (ORDER BY chain) AS row_num,
        _inserted_timestamp
    FROM (
        SELECT
            ethereum.streamline.udf_api(
                'GET','https://api.llama.fi/overview/options?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=true&dataType=dailyPremiumVolume',{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
    ),
    -- read:data:allChains is an array of chain names
    LATERAL FLATTEN (input=> read:data:allChains)
),
-- 2) Call the per-chain breakdown endpoint in 5 Jinja-unrolled batches of
--    5 chains each (row_num 1-25).
--    NOTE(review): chains with row_num > 25 are never fetched -- confirm the
--    chain list stays within this bound.
options_base AS (
    {% for item in range(5) %}
    (
        SELECT
            chain,
            ethereum.streamline.udf_api(
                'GET',CONCAT('https://api.llama.fi/overview/options/',chain,'?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=false&dataType=dailyPremiumVolume'),{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
        FROM (
            SELECT
                DISTINCT chain,
                row_num
            FROM all_chains_options_base
            WHERE row_num BETWEEN {{ item * 5 + 1 }} AND {{ (item + 1) * 5 }}
        )
        {% if is_incremental() %}
        -- On incremental runs, skip chains whose latest loaded day is already
        -- today, avoiding redundant API calls for up-to-date chains.
        WHERE chain NOT IN (
            SELECT
                chain
            FROM (
                SELECT
                    DISTINCT chain,
                    MAX(timestamp::DATE) AS max_timestamp
                FROM {{ this }}
                GROUP BY 1
                HAVING CURRENT_DATE = max_timestamp
            )
        )
        {% endif %}
    ) {% if not loop.last %}
    UNION ALL
    {% endif %}
    {% endfor %}
),
-- 3) Explode each response's totalDataChartBreakdown: element [0] is cast as
--    an epoch-seconds timestamp, element [1] holds the per-protocol object.
reads_output AS (
    SELECT
        chain,
        TO_TIMESTAMP(VALUE[0]::INTEGER) AS timestamp,
        VALUE[1] AS options_object,
        _inserted_timestamp
    FROM options_base,
    LATERAL FLATTEN (input=> read:data:totalDataChartBreakdown)
)
-- 4) Flatten the per-protocol object into one row per chain/protocol/day.
SELECT
    chain,
    timestamp,
    key::STRING AS protocol,
    -- cast to INTEGER discards sub-unit precision -- presumably intentional; confirm
    value::INTEGER AS daily_volume_premium,
    options_object,
    _inserted_timestamp,
    {{ dbt_utils.surrogate_key(
        ['chain', 'protocol', 'timestamp']
    ) }} AS id
FROM reads_output,
    LATERAL FLATTEN(input => PARSE_JSON(reads_output.options_object))

View File

@ -0,0 +1,25 @@
version: 2
# dbt tests and column listing for the DefiLlama daily premium options
# volume model.
models:
  - name: silver__defillama_options_premium
    tests:
      # single-column combination: effectively enforces uniqueness of ID
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - ID
    columns:
      - name: CHAIN
        tests:
          - not_null
      - name: TIMESTAMP
      - name: PROTOCOL
      - name: DAILY_VOLUME_PREMIUM
      - name: OPTIONS_OBJECT
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          # load timestamp must be stored as TIMESTAMP_NTZ
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
      - name: ID
        tests:
          - not_null

View File

@ -0,0 +1,83 @@
-- Incremental dbt model: daily protocol FEES per chain/protocol, pulled live
-- from the DefiLlama API. full_refresh is disabled so fetched history is never
-- dropped; rows merge on surrogate `id`.
{{ config(
    materialized = 'incremental',
    unique_key = 'id',
    full_refresh = false,
    tags = ['defillama']
) }}
-- 1) Fetch the list of chains reporting fees and number them so the per-chain
--    calls below can be issued in batches.
--    NOTE(review): the chain list is requested with dataType=totalFees while
--    the per-chain calls use dataType=dailyFees -- confirm this asymmetry is
--    intentional (it assumes both dataTypes cover the same chain set).
WITH all_chains_fees_base AS (
    SELECT
        LOWER(VALUE::STRING) AS chain,
        -- row_num drives the batching in fees_base (15 chains per batch)
        ROW_NUMBER() OVER (ORDER BY chain) AS row_num,
        _inserted_timestamp
    FROM (
        SELECT
            ethereum.streamline.udf_api(
                'GET','https://api.llama.fi/overview/fees?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=true&dataType=totalFees',{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
    ),
    -- read:data:allChains is an array of chain names
    LATERAL FLATTEN (input=> read:data:allChains)
),
-- 2) Call the per-chain breakdown endpoint in 5 Jinja-unrolled batches of
--    15 chains each (row_num 1-75).
--    NOTE(review): chains with row_num > 75 are never fetched -- confirm the
--    chain list stays within this bound.
fees_base AS (
    {% for item in range(5) %}
    (
        SELECT
            chain,
            ethereum.streamline.udf_api(
                'GET',CONCAT('https://api.llama.fi/overview/fees/',chain,'?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=false&dataType=dailyFees'),{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
        FROM (
            SELECT
                DISTINCT chain,
                row_num
            FROM all_chains_fees_base
            WHERE row_num BETWEEN {{ item * 15 + 1 }} AND {{ (item + 1) * 15 }}
        )
        {% if is_incremental() %}
        -- On incremental runs, skip chains whose latest loaded day is already
        -- today, avoiding redundant API calls for up-to-date chains.
        WHERE chain NOT IN (
            SELECT
                chain
            FROM (
                SELECT
                    DISTINCT chain,
                    MAX(timestamp::DATE) AS max_timestamp
                FROM {{ this }}
                GROUP BY 1
                HAVING CURRENT_DATE = max_timestamp
            )
        )
        {% endif %}
    ) {% if not loop.last %}
    UNION ALL
    {% endif %}
    {% endfor %}
),
-- 3) Explode each response's totalDataChartBreakdown: element [0] is cast as
--    an epoch-seconds timestamp, element [1] holds the per-protocol object.
reads_output AS (
    SELECT
        chain,
        TO_TIMESTAMP(VALUE[0]::INTEGER) AS timestamp,
        VALUE[1] AS fees_object,
        _inserted_timestamp
    FROM fees_base,
    LATERAL FLATTEN (input=> read:data:totalDataChartBreakdown)
)
-- 4) Flatten the per-protocol object into one row per chain/protocol/day.
SELECT
    chain,
    timestamp,
    key::STRING AS protocol,
    -- cast to INTEGER discards sub-unit precision -- presumably intentional; confirm
    value::INTEGER AS daily_fees,
    fees_object,
    _inserted_timestamp,
    {{ dbt_utils.surrogate_key(
        ['chain', 'protocol', 'timestamp']
    ) }} AS id
FROM reads_output,
    LATERAL FLATTEN(input => PARSE_JSON(reads_output.fees_object))

View File

@ -0,0 +1,25 @@
version: 2
# dbt tests and column listing for the DefiLlama daily protocol fees model.
models:
  - name: silver__defillama_protocol_fees
    tests:
      # single-column combination: effectively enforces uniqueness of ID
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - ID
    columns:
      - name: CHAIN
        tests:
          - not_null
      - name: TIMESTAMP
      - name: PROTOCOL
      - name: DAILY_FEES
      - name: FEES_OBJECT
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          # load timestamp must be stored as TIMESTAMP_NTZ
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
      - name: ID
        tests:
          - not_null

View File

@ -0,0 +1,83 @@
-- Incremental dbt model: daily protocol REVENUE per chain/protocol, pulled
-- live from the DefiLlama API. Structure mirrors the protocol-fees model but
-- requests revenue dataTypes. full_refresh is disabled so fetched history is
-- never dropped; rows merge on surrogate `id`.
{{ config(
    materialized = 'incremental',
    unique_key = 'id',
    full_refresh = false,
    tags = ['defillama']
) }}
-- 1) Fetch the list of chains reporting revenue and number them so the
--    per-chain calls below can be issued in batches.
--    NOTE(review): the chain list is requested with dataType=totalRevenue
--    while the per-chain calls use dataType=dailyRevenue -- confirm this
--    asymmetry is intentional.
WITH all_chains_rev_base AS (
    SELECT
        LOWER(VALUE::STRING) AS chain,
        -- row_num drives the batching in rev_base (15 chains per batch)
        ROW_NUMBER() OVER (ORDER BY chain) AS row_num,
        _inserted_timestamp
    FROM (
        SELECT
            ethereum.streamline.udf_api(
                'GET','https://api.llama.fi/overview/fees?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=true&dataType=totalRevenue',{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
    ),
    -- read:data:allChains is an array of chain names
    LATERAL FLATTEN (input=> read:data:allChains)
),
-- 2) Call the per-chain breakdown endpoint in 5 Jinja-unrolled batches of
--    15 chains each (row_num 1-75).
--    NOTE(review): chains with row_num > 75 are never fetched -- confirm the
--    chain list stays within this bound.
rev_base AS (
    {% for item in range(5) %}
    (
        SELECT
            chain,
            ethereum.streamline.udf_api(
                'GET',CONCAT('https://api.llama.fi/overview/fees/',chain,'?excludeTotalDataChart=true&excludeTotalDataChartBreakdown=false&dataType=dailyRevenue'),{},{}
            ) AS read,
            SYSDATE() AS _inserted_timestamp
        FROM (
            SELECT
                DISTINCT chain,
                row_num
            FROM all_chains_rev_base
            WHERE row_num BETWEEN {{ item * 15 + 1 }} AND {{ (item + 1) * 15 }}
        )
        {% if is_incremental() %}
        -- On incremental runs, skip chains whose latest loaded day is already
        -- today, avoiding redundant API calls for up-to-date chains.
        WHERE chain NOT IN (
            SELECT
                chain
            FROM (
                SELECT
                    DISTINCT chain,
                    MAX(timestamp::DATE) AS max_timestamp
                FROM {{ this }}
                GROUP BY 1
                HAVING CURRENT_DATE = max_timestamp
            )
        )
        {% endif %}
    ) {% if not loop.last %}
    UNION ALL
    {% endif %}
    {% endfor %}
),
-- 3) Explode each response's totalDataChartBreakdown: element [0] is cast as
--    an epoch-seconds timestamp, element [1] holds the per-protocol object.
reads_output AS (
    SELECT
        chain,
        TO_TIMESTAMP(VALUE[0]::INTEGER) AS timestamp,
        VALUE[1] AS rev_object,
        _inserted_timestamp
    FROM rev_base,
    LATERAL FLATTEN (input=> read:data:totalDataChartBreakdown)
)
-- 4) Flatten the per-protocol object into one row per chain/protocol/day.
SELECT
    chain,
    timestamp,
    key::STRING AS protocol,
    -- cast to INTEGER discards sub-unit precision -- presumably intentional; confirm
    value::INTEGER AS daily_rev,
    rev_object,
    _inserted_timestamp,
    {{ dbt_utils.surrogate_key(
        ['chain', 'protocol', 'timestamp']
    ) }} AS id
FROM reads_output,
    LATERAL FLATTEN(input => PARSE_JSON(reads_output.rev_object))

View File

@ -0,0 +1,25 @@
version: 2
# dbt tests and column listing for the DefiLlama daily protocol revenue model.
models:
  - name: silver__defillama_protocol_revenue
    tests:
      # single-column combination: effectively enforces uniqueness of ID
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - ID
    columns:
      - name: CHAIN
        tests:
          - not_null
      - name: TIMESTAMP
      - name: PROTOCOL
      - name: DAILY_REV
      - name: REV_OBJECT
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          # load timestamp must be stored as TIMESTAMP_NTZ
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
      - name: ID
        tests:
          - not_null

View File

@ -3,7 +3,7 @@
# Welcome to the Flipside Crypto External Models Documentation!
## **What does this documentation cover?**
The documentation included here details the design of the External tables and views available via [Flipside Crypto](https://flipsidecrypto.xyz/earn). For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/external-models)
The documentation included here details the design of the External tables and views available via [Flipside Crypto](https://flipsidecrypto.xyz/). The models in the External database leverage non-Flipside curated datasets and APIs. While Flipside has the ability to host these datasets, we do not have authority over the data quality or structure of the outputs. For more information on how these models are built, please see [the github repository.](https://github.com/FlipsideCrypto/external-models)
### **Quick Links to Table Documentation**
@ -19,8 +19,6 @@ The documentation included here details the design of the External tables and vi
- [tokenflow_eth__storage_reads](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.tokenflow_eth.storage_reads)
- [tokenflow_eth__transactions](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.tokenflow_eth.transactions)
**Token Flow: Starknet**
[Token Flow Documentation](https://docs.tokenflow.live/)
@ -35,10 +33,26 @@ The documentation included here details the design of the External tables and vi
- [tokenflow_starknet__l1_data_messages](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.tokenflow_starknet_l1_data.messages)
- [tokenflow_starknet__l1_data_storage_diffs](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.tokenflow_starknet_l1_data.storage_diffs)
**DefiLlama**
[DefiLlama Documentation](https://defillama.com/docs/api)
- [defillama__dim_bridges](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.dim_bridges)
- [defillama__dim_chains](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.dim_chains)
- [defillama__dim_dexes](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.dim_dexes)
- [defillama__dim_pools](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.dim_pools)
- [defillama__dim_protocols](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.dim_protocols)
- [defillama__dim_stablecoins](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.dim_stablecoins)
- [defillama__fact_bridge_volume](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.fact_bridge_volume)
- [defillama__fact_chain_tvl](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.fact_chain_tvl)
- [defillama__fact_dex_volume](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.fact_dex_volume)
- [defillama__fact_options_volume](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.fact_options_volume)
- [defillama__fact_protocol_fees_revenue](https://flipsidecrypto.github.io/external-models/#!/source/source.external_models.defillama.fact_protocol_fees_revenue)
## **Data Model Overview**
`EXTERNAL` is our blockchain-agonistic database for datasets derived from independently managed, external sources. While these models are built a few different ways, the primary method used is through calling internal functions that leverage curated datasets, such as Token Flow, to create accessible sql models for the analytics community. These models follow our standard approach, built using three layers of sql models: **bronze, silver, and gold (or core).** However, when the models are built externally (non-Flipside), the naming conventions may vary and models will be placed into schemas based on the source.
`EXTERNAL` is our blockchain-agnostic database for datasets derived from independently managed, external sources. While these models are built a few different ways, the primary method used is through calling internal functions that leverage curated datasets, such as Token Flow or DefiLlama's API endpoints, to create accessible SQL models for the analytics community. These models follow our standard approach, built using three layers of SQL models: **bronze, silver, and gold (or core).** However, when the models are built externally (non-Flipside), the naming conventions may vary and models will be placed into schemas based on the source.
- Bronze: Data is loaded in from the source as a view
- Silver: All necessary parsing, filtering, de-duping, and other transformations are done here
@ -70,11 +84,10 @@ Note that you can also right-click on models to interactively filter and explore
### **More information**
- [Flipside](https://flipsidecrypto.xyz/earn)
- [Flipside](https://flipsidecrypto.xyz/)
- [Velocity](https://app.flipsidecrypto.com/velocity?nav=Discover)
- [Tutorials](https://docs.flipsidecrypto.com/our-data/tutorials)
- [Github](https://github.com/FlipsideCrypto/external-models)
- [Query Editor Shortcuts](https://docs.flipsidecrypto.com/velocity/query-editor-shortcuts)
- [What is dbt?](https://docs.getdbt.com/docs/introduction)