Add/deep nft value (#14)

This commit is contained in:
Austin 2023-02-21 14:33:09 -05:00 committed by GitHub
parent 8aee821493
commit fa5cbfd3de
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 948 additions and 1 deletions

44
.github/workflows/dbt_run_bi_hourly.yml vendored Normal file
View File

@ -0,0 +1,44 @@
---
# Scheduled bi-hourly dbt run for the deepnftvalue models (excluding the
# historical-valuations backfill, which has its own workflow).
name: dbt_run_scheduled_bi_hourly
run-name: dbt_run_scheduled_bi_hourly

on:
  workflow_dispatch:
  schedule:
    # Runs "every 2 hours" (see https://crontab.guru)
    - cron: '0 1-23/2 * * *'

env:
  DBT_PROFILES_DIR: ${{ vars.DBT_PROFILES_DIR }}
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

# Only one instance of this workflow at a time.
concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      # setup-python@v1 runs on the deprecated Node 12 runtime and will be
      # removed by GitHub; v4 is the maintained release.
      - uses: actions/setup-python@v4
        with:
          python-version: "3.7"
      - name: install dependencies
        run: |
          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m models/deepnftvalue models/silver__dates.sql --exclude models/deepnftvalue/bronze/bronze__dnv_historical_valuations.sql

View File

@ -0,0 +1,44 @@
---
# Temporary 5-minute backfill loop for the DeepNFTValue historical
# valuations bronze model (disable once the backfill completes).
name: dbt_run_temp_backfill
run-name: dbt_run_temp_backfill

on:
  workflow_dispatch:
  schedule:
    # Runs "every 5 mins" (see https://crontab.guru)
    - cron: '*/5 * * * *'

env:
  DBT_PROFILES_DIR: ${{ vars.DBT_PROFILES_DIR }}
  ACCOUNT: "${{ vars.ACCOUNT }}"
  ROLE: "${{ vars.ROLE }}"
  USER: "${{ vars.USER }}"
  PASSWORD: "${{ secrets.PASSWORD }}"
  REGION: "${{ vars.REGION }}"
  DATABASE: "${{ vars.DATABASE }}"
  WAREHOUSE: "${{ vars.WAREHOUSE }}"
  SCHEMA: "${{ vars.SCHEMA }}"

# Only one instance of this workflow at a time; overlapping 5-minute runs
# would double-fetch the same request URLs.
concurrency:
  group: ${{ github.workflow }}

jobs:
  run_dbt_jobs:
    runs-on: ubuntu-latest
    environment:
      name: workflow_prod
    steps:
      - uses: actions/checkout@v3
      # setup-python@v1 runs on the deprecated Node 12 runtime and will be
      # removed by GitHub; v4 is the maintained release.
      - uses: actions/setup-python@v4
        with:
          python-version: "3.7"
      - name: install dependencies
        run: |
          pip3 install dbt-snowflake==${{ vars.DBT_VERSION }} cli_passthrough requests click
          dbt deps
      - name: Run DBT Jobs
        run: |
          dbt run -m models/deepnftvalue/bronze/bronze__dnv_historical_valuations.sql

View File

@ -41,4 +41,4 @@ jobs:
dbt deps
- name: Run DBT Jobs
run: |
dbt run --exclude tag:defillama
dbt run --exclude tag:defillama models/deepnftvalue

View File

@ -0,0 +1,27 @@
{{ config(
    materialized = 'table'
) }}
-- Seed-style lookup of the NFT collections tracked via DeepNFTValue.
-- created_at is the first date requested from the historical valuations
-- API — presumably the start of available data, not the mint date (confirm).
-- NOTE(review): the MAYC address is mixed-case while the others are
-- lowercase; downstream gold views apply LOWER(), and _id values in
-- incremental silver tables already embed this casing, so it is left as-is.
-- UNION ALL (not UNION): the rows are distinct by construction, so the
-- implicit de-duplication of UNION is wasted work.
SELECT
    'boredapeyachtclub' AS collection_slug,
    '0xbc4ca0eda7647a8ab7c2061c2e118a18a936f13d' AS contract_address,
    '2022-06-01' AS created_at,
    10000 AS total_supply
UNION ALL
SELECT
    'cryptopunks',
    '0xb47e3cd837ddf8e4c57f05d70ab865de6e193bbb',
    '2021-01-01',
    10000
UNION ALL
SELECT
    'azuki',
    '0xed5af388653567af2f388e6224dc7c4b3241c544',
    '2022-06-01',
    10000
UNION ALL
SELECT
    'mutant-ape-yacht-club',
    '0x60E4d786628Fea6478F785A6d7e704777c86a7c6',
    '2022-06-01',
    20000

View File

@ -0,0 +1,51 @@
-- Incremental capture of collection-level stats from the DeepNFTValue API:
-- one udf_api GET per collection slug per run. full_refresh is disabled so
-- raw API responses are never dropped by a --full-refresh.
{{ config(
materialized = 'incremental',
unique_key = '_id',
full_refresh = false
) }}
WITH slugs AS (
SELECT
collection_slug
FROM
{{ ref('bronze__dnv_collection_slugs') }}
),
api_endpoint AS (
SELECT
'https://api.deepnftvalue.com/v1/collections/' AS api_endpoint
),
-- One request URL per tracked collection.
api_url AS (
SELECT
api_endpoint || collection_slug AS api_url,
collection_slug
FROM
slugs
CROSS JOIN api_endpoint
),
-- HTTP header JSON (built as a string) carrying the stored API token.
api_key AS (
SELECT
CONCAT(
'{\'Authorization\': \'Token ',
api_key,
'\', \'accept\': \'application/json\'}'
) AS header
FROM
{{ source(
'crosschain_silver',
'apis_keys'
) }}
WHERE
api_name = 'deepnftvalue'
)
SELECT
-- NOTE(review): the method string ' GET ' has surrounding spaces —
-- presumably udf_api trims it; confirm against the UDF definition.
ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp,
SYSDATE() _inserted_timestamp,
-- Lateral alias reuse: _inserted_timestamp above is referenced here, so
-- _id = slug + load timestamp and every run inserts fresh rows.
CONCAT(
collection_slug,
'-',
_inserted_timestamp
) AS _id
FROM
api_url
CROSS JOIN api_key

View File

@ -0,0 +1,15 @@
---
version: 2

models:
  - name: bronze__dnv_collections
    tests:
      # _id is the incremental merge key, so it must be unique.
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _ID
    columns:
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          # Freshness: fail if no rows landed within the last day.
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 1

View File

@ -0,0 +1,57 @@
{{ config(
    materialized = 'incremental',
    unique_key = 'api_url',
    full_refresh = false
) }}
-- Backfill loader: fetches up to 3 not-yet-loaded historical-valuation
-- request URLs per run, newest dates first. Raw responses are kept forever
-- (full_refresh = false) and merged on api_url.
WITH requests AS (

    SELECT
        api_url,
        date_day
    FROM
        {{ ref('silver__dnv_historical_requests') }}

{% if is_incremental() %}
    WHERE
        -- Skip URLs already fetched. api_url is built with CONCAT and is
        -- never NULL, so NOT IN cannot be poisoned by NULL rows.
        api_url NOT IN (
            SELECT
                api_url
            FROM
                {{ this }}
        )
{% endif %}

    ORDER BY
        date_day DESC
    LIMIT 3
),
-- HTTP header JSON (built as a string) carrying the stored API token.
api_key AS (

    SELECT
        CONCAT(
            '{\'Authorization\': \'Token ',
            api_key,
            '\', \'accept\': \'application/json\'}'
        ) AS header
    FROM
        {{ source(
            'crosschain_silver',
            'apis_keys'
        ) }}
    WHERE
        api_name = 'deepnftvalue'
)
SELECT
    -- NOTE(review): the method string ' GET ' has surrounding spaces —
    -- presumably udf_api trims it; confirm against the UDF definition.
    ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp,
    SYSDATE() AS _inserted_timestamp,
    api_url
FROM
    requests
    -- Single api_key row attached to every request (was JOIN ... ON 1 = 1).
    CROSS JOIN api_key
-- The original trailing WHERE EXISTS (SELECT 1 FROM requests) was redundant:
-- when requests is empty the join already yields zero rows, so it was removed.

View File

@ -0,0 +1,15 @@
version: 2
models:
- name: bronze__dnv_historical_valuations
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- API_URL
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1

View File

@ -0,0 +1,59 @@
{{ config(
    materialized = 'incremental',
    unique_key = '_id',
    full_refresh = false
) }}
-- Fetches up to 4 not-yet-loaded latest-valuation request URLs per run.
-- Raw responses are kept forever (full_refresh = false) and merged on _id.
WITH requests AS (

    SELECT
        api_url,
        collection_slug,
        _id
    FROM
        {{ ref('silver__dnv_latest_valuation_requests') }}

{% if is_incremental() %}
    WHERE
        -- Skip requests already fetched. _id is built with CONCAT and is
        -- never NULL, so NOT IN cannot be poisoned by NULL rows.
        _id NOT IN (
            SELECT
                _id
            FROM
                {{ this }}
        )
{% endif %}

    ORDER BY
        collection_slug
    LIMIT 4
),
-- HTTP header JSON (built as a string) carrying the stored API token.
api_key AS (

    SELECT
        CONCAT(
            '{\'Authorization\': \'Token ',
            api_key,
            '\', \'accept\': \'application/json\'}'
        ) AS header
    FROM
        {{ source(
            'crosschain_silver',
            'apis_keys'
        ) }}
    WHERE
        api_name = 'deepnftvalue'
)
SELECT
    -- NOTE(review): the method string ' GET ' has surrounding spaces —
    -- presumably udf_api trims it; confirm against the UDF definition.
    ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp,
    SYSDATE() AS _inserted_timestamp,
    collection_slug,
    _id
FROM
    requests
    -- Single api_key row attached to every request (was JOIN ... ON 1 = 1).
    CROSS JOIN api_key
-- The original trailing WHERE EXISTS (SELECT 1 FROM requests) was redundant:
-- when requests is empty the join already yields zero rows, so it was removed.

View File

@ -0,0 +1,15 @@
version: 2
models:
- name: bronze__dnv_latest_valuations
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1

View File

@ -0,0 +1,59 @@
{{ config(
    materialized = 'incremental',
    unique_key = '_id',
    full_refresh = false
) }}
-- Fetches up to 2 not-yet-loaded token-metadata request URLs per run.
-- Raw responses are kept forever (full_refresh = false) and merged on _id.
WITH requests AS (

    SELECT
        api_url,
        collection_slug,
        _id
    FROM
        {{ ref('silver__dnv_token_requests') }}

{% if is_incremental() %}
    WHERE
        -- Skip requests already fetched. _id is built with CONCAT and is
        -- never NULL, so NOT IN cannot be poisoned by NULL rows.
        _id NOT IN (
            SELECT
                _id
            FROM
                {{ this }}
        )
{% endif %}

    ORDER BY
        collection_slug
    LIMIT 2
),
-- HTTP header JSON (built as a string) carrying the stored API token.
api_key AS (

    SELECT
        CONCAT(
            '{\'Authorization\': \'Token ',
            api_key,
            '\', \'accept\': \'application/json\'}'
        ) AS header
    FROM
        {{ source(
            'crosschain_silver',
            'apis_keys'
        ) }}
    WHERE
        api_name = 'deepnftvalue'
)
SELECT
    -- NOTE(review): the method string ' GET ' has surrounding spaces —
    -- presumably udf_api trims it; confirm against the UDF definition.
    ethereum.streamline.udf_api(' GET ', api_url, PARSE_JSON(header),{}) AS resp,
    SYSDATE() AS _inserted_timestamp,
    collection_slug,
    _id
FROM
    requests
    -- Single api_key row attached to every request (was JOIN ... ON 1 = 1).
    CROSS JOIN api_key
-- The original trailing WHERE EXISTS (SELECT 1 FROM requests) was redundant:
-- when requests is empty the join already yields zero rows, so it was removed.

View File

@ -0,0 +1,15 @@
version: 2
models:
- name: bronze__dnv_tokens
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1

View File

@ -0,0 +1,21 @@
{{ config(
    materialized = 'view',
    meta = {
        'database_tags': {
            'table': {
                'PROTOCOL': 'DEEPNFTVALUE'
            }
        }
    }
) }}
-- Gold view of the latest collection-level DeepNFTValue stats.
-- _inserted_timestamp is surfaced as valuation_timestamp for consumers.
SELECT
    collection_name,
    -- LOWER() uppercased for consistency with the project's SQL function
    -- convention; normalizes the address for case-insensitive joins.
    LOWER(collection_address) AS collection_address,
    floor_price,
    valuation_max,
    valuation_min,
    attributes,
    _inserted_timestamp AS valuation_timestamp
FROM
    {{ ref('silver__dnv_collections') }}

View File

@ -0,0 +1,28 @@
{{ config(
    materialized = 'view',
    meta = {
        'database_tags': {
            'table': {
                'PROTOCOL': 'DEEPNFTVALUE'
            }
        }
    }
) }}
-- Gold view of per-token DeepNFTValue metadata and valuations.
-- _inserted_timestamp is surfaced as updated_timestamp for consumers.
SELECT
    collection_name,
    -- LOWER() uppercased for consistency with the project's SQL function
    -- convention; normalizes the address for case-insensitive joins.
    LOWER(contract_address) AS collection_address,
    token_id,
    active_offer,
    attributes,
    image,
    is_flagged,
    last_sale,
    owner,
    rarity,
    rarity_rank,
    top_bid,
    valuation,
    _inserted_timestamp AS updated_timestamp
FROM
    {{ ref('silver__dnv_tokens') }}

View File

@ -0,0 +1,21 @@
{{ config(
    materialized = 'view',
    meta = {
        'database_tags': {
            'table': {
                'PROTOCOL': 'DEEPNFTVALUE'
            }
        }
    }
) }}
-- Gold view of daily per-token DeepNFTValue valuations.
-- _inserted_timestamp is surfaced as updated_timestamp for consumers.
SELECT
    date_day AS valuation_date,
    collection_name,
    -- LOWER() uppercased for consistency with the project's SQL function
    -- convention; normalizes the address for case-insensitive joins.
    LOWER(contract_address) AS collection_address,
    token_id,
    currency,
    price,
    _inserted_timestamp AS updated_timestamp
FROM
    {{ ref('silver__dnv_valuations') }}

View File

@ -0,0 +1,65 @@
{{ config(
    materialized = 'table'
) }}
-- Builds one historical-valuations API URL per (collection, day, page):
-- pages of `limiter` tokens, for every day from created_at up to yesterday.
WITH slugs AS (

    SELECT
        collection_slug,
        total_supply,
        created_at,
        5000 AS limiter,
        -- Snowflake lateral alias: limiter defined above is reusable here.
        total_supply / limiter AS total_pages
    FROM
        {{ ref('bronze__dnv_collection_slugs') }}
),
date_series AS (

    SELECT
        date_day
    FROM
        {{ ref('silver__dates') }}
),
generate_sequence AS (

    -- SEQ4() alone may produce gaps (documented Snowflake behavior); a gap
    -- would silently skip a page offset. ROW_NUMBER over it yields a dense
    -- 0..rowcount-1 sequence with identical intended semantics.
    SELECT
        ROW_NUMBER() over (
            ORDER BY
                SEQ4()
        ) - 1 AS seq
    FROM
        TABLE(GENERATOR(rowcount => 100))
),
all_tokens AS (

    SELECT
        *,
        seq * limiter AS offset
    FROM
        date_series
        -- One row per collection per day from its created_at to yesterday.
        -- NOTE(review): created_at is a string compared to a DATE — relies
        -- on Snowflake implicit casting; confirm intended.
        JOIN slugs
        ON created_at <= date_day
        AND date_day < SYSDATE() :: DATE
        JOIN generate_sequence
        ON seq <= total_pages - 1
)
SELECT
    collection_slug,
    date_day,
    CONCAT(
        'https://api.deepnftvalue.com/v1/valuations/hist/',
        collection_slug,
        '?limit=',
        limiter,
        '&token_ids=all&start=',
        date_day,
        '&end=',
        date_day
    ) AS api_url1,
    -- Page 0 omits the offset parameter entirely.
    CASE
        WHEN offset = 0 THEN ''
        ELSE CONCAT(
            '&offset=',
            offset
        )
    END AS api_url2,
    -- Lateral alias reuse of api_url1/api_url2 from this same SELECT.
    CONCAT(
        api_url1,
        api_url2
    ) AS api_url
FROM
    all_tokens

View File

@ -0,0 +1,63 @@
{{ config(
    materialized = 'table'
) }}
-- Builds one latest-valuations API URL per (collection, page) plus a
-- per-day _id so each page is fetched at most once per day.
WITH slugs AS (

    SELECT
        collection_slug,
        total_supply
    FROM
        {{ ref('bronze__dnv_collection_slugs') }}
),
offsets AS (

    SELECT
        2500 AS limiter
),
generate_sequence AS (

    -- SEQ4() alone may produce gaps (documented Snowflake behavior); a gap
    -- would silently skip a page offset. ROW_NUMBER over it yields a dense
    -- 0..rowcount-1 sequence with identical intended semantics.
    SELECT
        ROW_NUMBER() over (
            ORDER BY
                SEQ4()
        ) - 1 AS seq
    FROM
        TABLE(GENERATOR(rowcount => 100000))
),
limits AS (

    SELECT
        *,
        seq * limiter AS offset
    FROM
        generate_sequence
        CROSS JOIN offsets
        -- Only as many pages as the collection's supply requires.
        JOIN slugs
        ON seq * limiter < total_supply
)
SELECT
    *,
    CASE
        seq
        WHEN 0 THEN CONCAT(
            'https://api.deepnftvalue.com/v1/valuations/',
            collection_slug,
            '?limit=',
            limiter,
            '&token_ids=all'
        )
        -- Fixed: the offset branch used plain http://, inconsistent with
        -- the https:// page-0 branch and sending the auth token in clear.
        ELSE CONCAT(
            'https://api.deepnftvalue.com/v1/valuations/',
            collection_slug,
            '?limit=',
            limiter,
            '&offset=',
            offset,
            '&token_ids=all'
        )
    END AS api_url,
    -- One _id per (collection, page, calendar day).
    CONCAT(
        collection_slug,
        '-',
        offset,
        '-',
        SYSDATE() :: DATE
    ) AS _id
FROM
    limits

View File

@ -0,0 +1,55 @@
{{ config(
    materialized = 'table'
) }}
-- Builds one token-metadata API URL per (collection, page) plus a per-day
-- _id so each page is fetched at most once per day.
WITH slugs AS (

    SELECT
        collection_slug,
        total_supply
    FROM
        {{ ref('bronze__dnv_collection_slugs') }}
),
offsets AS (

    SELECT
        2000 AS limiter
),
generate_sequence AS (

    -- SEQ4() alone may produce gaps (documented Snowflake behavior); a gap
    -- would silently skip a page offset. ROW_NUMBER over it yields a dense
    -- 0..rowcount-1 sequence with identical intended semantics.
    SELECT
        ROW_NUMBER() over (
            ORDER BY
                SEQ4()
        ) - 1 AS seq
    FROM
        TABLE(GENERATOR(rowcount => 100000))
),
limits AS (

    SELECT
        *,
        seq * limiter AS offset
    FROM
        generate_sequence
        CROSS JOIN offsets
        -- Only as many pages as the collection's supply requires.
        JOIN slugs
        ON seq * limiter < total_supply
)
SELECT
    *,
    CASE
        seq
        WHEN 0 THEN CONCAT(
            'https://api.deepnftvalue.com/v1/tokens/',
            collection_slug,
            '?limit=',
            limiter
        )
        ELSE CONCAT(
            'https://api.deepnftvalue.com/v1/tokens/',
            collection_slug,
            '?limit=',
            limiter,
            '&offset=',
            offset
        )
    END AS api_url,
    -- Lateral alias reuse: api_url from this same SELECT, plus the day.
    CONCAT(api_url, '-', SYSDATE() :: DATE) AS _id
FROM
    limits

View File

@ -0,0 +1,60 @@
-- Parses raw collection API responses into typed columns, merged on _id.
-- _id is collection + hour, so at most one row per collection per hour
-- survives (qualify keeps the latest response within the hour).
{{ config(
materialized = 'incremental',
unique_key = '_id'
) }}
WITH base AS (
SELECT
resp,
_inserted_timestamp
FROM
{{ ref('bronze__dnv_collections') }}
{% if is_incremental() %}
-- Only reparse rows at or after the latest timestamp already loaded;
-- >= (not >) re-reads the boundary row, deduped by the qualify below.
WHERE
_inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
)
FROM
{{ this }}
)
{% endif %}
),
-- Extract typed fields from the JSON response body.
FINAL AS (
SELECT
_inserted_timestamp,
resp :data :attributes AS attributes,
resp :data :contract_address :: STRING AS collection_address,
resp :data :floor_price :: FLOAT AS floor_price,
resp :data :name :: STRING AS collection_name,
resp :data :slug :: STRING AS slug,
resp :data :valuation_max :: FLOAT AS valuation_max,
resp :data :valuation_min :: FLOAT AS valuation_min
FROM
base
)
SELECT
_inserted_timestamp,
attributes,
collection_address,
floor_price,
collection_name,
valuation_max,
valuation_min,
-- Hour-truncated key: one row per collection per hour.
CONCAT(
collection_address,
'-',
DATE_TRUNC(
'hour',
_inserted_timestamp
)
) AS _id
FROM
-- Keep only the most recent response for each _id.
FINAL qualify ROW_NUMBER() over (
PARTITION BY _id
ORDER BY
_inserted_timestamp DESC
) = 1

View File

@ -0,0 +1,15 @@
---
version: 2

models:
  - name: silver__dnv_collections
    tests:
      # _id is the incremental merge key, so it must be unique.
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _ID
    columns:
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
          # Freshness: fail if no rows landed within the last day.
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 1

View File

@ -0,0 +1,72 @@
-- Flattens raw token API responses (one response = one page of tokens)
-- into one row per token, merged on _id = contract + token_id so each
-- token keeps only its latest metadata.
{{ config(
materialized = 'incremental',
unique_key = '_id'
) }}
WITH base AS (
SELECT
resp,
_inserted_timestamp
FROM
{{ ref('bronze__dnv_tokens') }}
{% if is_incremental() %}
-- Only reparse rows at or after the latest timestamp already loaded;
-- >= (not >) re-reads the boundary row, deduped by the qualify below.
WHERE
_inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
)
FROM
{{ this }}
)
{% endif %}
),
-- LATERAL FLATTEN explodes resp:data:results into one VALUE per token.
FINAL AS (
SELECT
_inserted_timestamp,
VALUE :token_id AS token_id,
VALUE :active_offer AS active_offer,
VALUE :attributes AS attributes,
VALUE :collection AS collection,
VALUE :image AS image,
VALUE :is_flagged AS is_flagged,
VALUE :last_sale AS last_sale,
VALUE :owner AS owner,
VALUE :rarity AS rarity,
VALUE :rarity_rank AS rarity_rank,
VALUE :top_bid AS top_bid,
VALUE :valuation AS valuation
FROM
base,
LATERAL FLATTEN(
input => resp :data :results
)
)
SELECT
token_id,
active_offer,
attributes,
collection :contract :: STRING AS contract_address,
collection :name :: STRING AS collection_name,
image,
is_flagged,
last_sale,
owner,
rarity,
rarity_rank,
top_bid,
valuation,
_inserted_timestamp,
-- Lateral alias reuse: contract_address defined above in this SELECT.
CONCAT(
contract_address,
'-',
token_id
) AS _id
FROM
-- Keep only the most recent row per token.
FINAL qualify ROW_NUMBER() over (
PARTITION BY _id
ORDER BY
_inserted_timestamp DESC
) = 1

View File

@ -0,0 +1,15 @@
version: 2
models:
- name: silver__dnv_tokens
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1

View File

@ -0,0 +1,99 @@
-- Combines historical (backfill) and latest valuation responses, flattens
-- them to one row per (contract, token, day), and merges on that _id.
{{ config(
materialized = 'incremental',
unique_key = '_id'
) }}
WITH historical_base AS (
SELECT
resp,
_inserted_timestamp
FROM
{{ ref('bronze__dnv_historical_valuations') }}
{% if is_incremental() %}
-- Re-read one extra day behind the high-water mark (MAX::DATE - 1) so
-- late-arriving same-day responses are reprocessed; deduped below.
WHERE
_inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
),
latest_base AS (
SELECT
resp,
_inserted_timestamp
FROM
{{ ref('bronze__dnv_latest_valuations') }}
{% if is_incremental() %}
-- Same one-day lookback as historical_base.
WHERE
_inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
) :: DATE - 1
FROM
{{ this }}
)
{% endif %}
),
-- Both sources share the same response shape; duplicates across them are
-- resolved by the qualify at the end.
all_data AS (
SELECT
resp,
_inserted_timestamp
FROM
historical_base
UNION ALL
SELECT
resp,
_inserted_timestamp
FROM
latest_base
),
-- LATERAL FLATTEN explodes resp:data:results into one VALUE per valuation.
FINAL AS (
SELECT
_inserted_timestamp,
VALUE :currency :: STRING AS currency,
VALUE :date :: DATE AS date_day,
VALUE :nft :collection :name :: STRING AS collection_name,
VALUE :nft :collection :slug :: STRING AS slug,
VALUE :nft :token_id :: INTEGER AS token_id,
VALUE :price :: FLOAT AS price
FROM
all_data,
LATERAL FLATTEN(
input => resp :data :results
)
)
SELECT
_inserted_timestamp,
currency,
date_day,
collection_name,
slug,
token_id,
price,
-- contract_address comes from the slug-lookup join below.
contract_address,
CONCAT(
contract_address,
'-',
token_id,
'-',
date_day
) AS _id
FROM
FINAL
-- Map slug -> contract_address via the collection lookup table.
JOIN {{ ref('bronze__dnv_collection_slugs') }}
b
ON FINAL.slug = b.collection_slug qualify ROW_NUMBER() over (
PARTITION BY contract_address,
token_id,
date_day
ORDER BY
_inserted_timestamp DESC
) = 1

View File

@ -0,0 +1,15 @@
version: 2
models:
- name: silver__dnv_valuations
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: _INSERTED_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1

7
models/silver__dates.sql Normal file
View File

@ -0,0 +1,7 @@
-- Calendar dimension table (2017-01-01 through 2027-12-31) generated by
-- the dbt_date package; provides date_day for the request-builder models.
{{ config(
materialized = "table"
) }}
{{ dbt_date.get_date_dimension(
'2017-01-01',
'2027-12-31'
) }}

View File

@ -32,4 +32,14 @@ sources:
- name: tokens_balance_diffs
- name: tokens_metadata
- name: transactions
- name: crosschain_silver
database: crosschain
schema: silver
tables:
- name: apis_keys
- name: ethereum_silver
database: ethereum
schema: silver
tables:
- name: nft_transfers