AN-4881/olas-agent-activity (#152)

* initial model temps

* update contracts and columns

* silver staking

* checkpoint and evict temps

* checkpoint and evicted

* gold views and related column updates

* fixed columns in ez views

* docs

* service staking metadata

* docs

* docs

* initial ymls

* column descriptions

* tests and docs

* docs

* overview docs

* updates for service reads

* coalesce

* error

* row num

* heal

* update registry

* remove test

* column name

* rebuild

* remove docs
This commit is contained in:
drethereum 2024-06-25 16:47:12 -06:00 committed by GitHub
parent 20cbc4f55b
commit 30b21a40e1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
27 changed files with 2141 additions and 2 deletions

View File

@ -35,7 +35,7 @@ There is more information on how to use dbt docs in the last section of this doc
- [ez_token_transfers](https://flipsidecrypto.github.io/gnosis-models/#!/model/model.gnosis_models.core__ez_token_transfers)
- [ez_decoded_event_logs](https://flipsidecrypto.github.io/gnosis-models/#!/model/model.gnosis_models.core__ez_decoded_event_logs)
### DeFi Tables (gnosis.defi) ###
### DeFi Tables (gnosis.defi)
- [ez_dex_swaps](https://flipsidecrypto.github.io/gnosis-models/#!/model/model.gnosis_models.defi__ez_dex_swaps)
- [ez_bridge_activity](https://flipsidecrypto.github.io/gnosis-models/#!/model/model.gnosis_models.defi__ez_bridge_activity)
- [dim_dex_liquidity_pools](https://flipsidecrypto.github.io/gnosis-models/#!/model/model.gnosis_models.defi__dim_dex_liquidity_pools)
@ -52,7 +52,7 @@ There is more information on how to use dbt docs in the last section of this doc
- [ez_asset_metadata](https://flipsidecrypto.github.io/gnosis-models/#!/model/model.gnosis_models.price__ez_asset_metadata)
- [ez_prices_hourly](https://flipsidecrypto.github.io/gnosis-models/#!/model/model.gnosis_models.price__ez_prices_hourly)
### NFT Tables (gnosis.nft) ###
### NFT Tables (gnosis.nft)
- [ez_nft_transfers](https://flipsidecrypto.github.io/gnosis-models/#!/model/model.gnosis_models.nft__ez_nft_transfers)
### Stats Tables (gnosis.stats)

View File

@ -0,0 +1,153 @@
-- Incremental model: performs on-the-fly eth_call reads of getService(serviceId)
-- against the OLAS Service Registry on Gnosis and extracts each service's agent ids.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'getservice_reads_id',
full_refresh = false,
tags = ['curated']
) }}
-- Latest registration block per (contract, service id). Incremental runs only
-- consider recently inserted registrations, and skip pairs already read into
-- this table (matched on contract_address-function_input).
WITH service_contracts AS (
SELECT
contract_address,
service_id AS registry_id,
MAX(block_number) AS block_number
FROM
{{ ref('silver_olas__service_registrations') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND CONCAT(
contract_address,
'-',
registry_id
) NOT IN (
SELECT
CONCAT(
contract_address,
'-',
function_input
)
FROM
{{ this }}
)
{% endif %}
GROUP BY
1,
2
),
-- 4-byte selector for getService(uint256)
function_sigs AS (
SELECT
'0xef0e239b' AS function_sig,
'getService' AS function_name
),
-- Build the calldata: selector followed by the service id left-padded to one
-- 32-byte (64 hex char) word. SUBSTR(..., 3) strips the '0x' prefix.
inputs AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
registry_id AS function_input,
CONCAT(
function_sig,
LPAD(
SUBSTR(utils.udf_int_to_hex(function_input), 3),
64,
0)
) AS DATA
FROM
service_contracts
JOIN function_sigs
ON 1 = 1
),
-- Execute each eth_call through the node API at the service's registration block.
contract_reads AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
function_input,
DATA,
utils.udf_json_rpc_call(
'eth_call',
[{ 'to': contract_address, 'from': null, 'data': data }, utils.udf_int_to_hex(block_number) ]
) AS rpc_request,
live.udf_api(
'POST',
CONCAT(
'{service}',
'/',
'{Authentication}'
),{},
rpc_request,
'Vault/prod/gnosis/quicknode/mainnet'
) AS read_output,
SYSDATE() AS _inserted_timestamp
FROM
inputs
),
-- Split the hex return payload into 32-byte words and decode each word to an
-- integer (VALUE comes from the LATERAL FLATTEN over segmented_read).
reads_flat AS (
SELECT
read_output,
read_output :data :id :: STRING AS read_id,
read_output :data :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
regexp_substr_all(SUBSTR(read_result, 3, len(read_result)), '.{64}') AS segmented_read,
utils.udf_hex_to_int(
VALUE :: STRING
) AS decoded_read,
function_sig,
function_name,
function_input,
DATA,
contract_address,
block_number,
_inserted_timestamp
FROM
contract_reads,
LATERAL FLATTEN(
input => segmented_read
)
),
-- Re-aggregate the decoded words per read; the tail of the array (from word 9
-- onward) is taken as the agent id list.
-- NOTE(review): assumes agent ids always start at word 9 of the getService
-- return encoding — TODO confirm against the Service Registry ABI.
reads_final AS (
SELECT
read_output,
read_id,
read_result,
read_id_object,
segmented_read,
function_sig,
function_name,
function_input,
DATA,
contract_address,
block_number,
_inserted_timestamp,
ARRAY_AGG(TRY_TO_NUMBER(decoded_read)) AS reads_array,
ARRAY_SLICE(reads_array, 9, ARRAY_SIZE(reads_array)) AS agent_ids
FROM
reads_flat
GROUP BY
ALL)
-- Keep only reads that produced a non-empty agent id array.
SELECT
*,
{{ dbt_utils.generate_surrogate_key(
['contract_address','function_input']
) }} AS getservice_reads_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
reads_final
WHERE
agent_ids IS NOT NULL
AND agent_ids :: STRING <> '[]'

View File

@ -0,0 +1,17 @@
# Schema tests for silver_olas__getservice_reads: surrogate-key uniqueness
# plus not-null checks on the core read columns.
version: 2
models:
- name: silver_olas__getservice_reads
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- GETSERVICE_READS_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: FUNCTION_INPUT
tests:
- not_null
- name: AGENT_IDS
tests:
- not_null

View File

@ -0,0 +1,146 @@
-- Incremental model: fetches each registered service's token URI JSON metadata
-- (name, description, image, traits) via live API calls and normalizes IPFS links.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'registry_metadata_id',
full_refresh = false,
tags = ['curated']
) }}
-- New or retry-eligible token URI reads. Incremental runs pick up rows newer
-- than the last load, OR rows already present whose NAME is still NULL
-- (i.e. a previous metadata fetch failed and should be retried).
WITH new_records AS (
SELECT
block_number,
contract_address,
function_input AS registry_id,
token_uri_link,
_inserted_timestamp,
ROW_NUMBER() over (
ORDER BY
contract_address,
registry_id
) AS row_num
FROM
{{ ref('silver_olas__registry_reads') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp > (
SELECT
MAX(_inserted_timestamp)
FROM
{{ this }}
)
OR
CONCAT(
contract_address,
'-',
registry_id
) IN (
SELECT
CONCAT(
contract_address,
'-',
registry_id
)
FROM
{{ this }}
WHERE
NAME IS NULL
)
{% endif %}
),
-- Call the token URI endpoint. The UNION ALL splits the work into batches of
-- 100 rows by row_num — presumably to throttle the external live.udf_api
-- calls per query; verify against the UDF's batching behavior.
uri_calls AS (
SELECT
block_number,
contract_address,
registry_id,
token_uri_link,
live.udf_api(token_uri_link) AS resp,
_inserted_timestamp
FROM
new_records
WHERE
row_num <= 100
UNION ALL
SELECT
block_number,
contract_address,
registry_id,
token_uri_link,
live.udf_api(token_uri_link) AS resp,
_inserted_timestamp
FROM
new_records
WHERE
row_num > 100
AND row_num <= 200
UNION ALL
SELECT
block_number,
contract_address,
registry_id,
token_uri_link,
live.udf_api(token_uri_link) AS resp,
_inserted_timestamp
FROM
new_records
WHERE
row_num > 200
),
-- Parse the JSON response. ipfs:// URIs (and bare IPFS hashes for images)
-- are rewritten to the Autonolas HTTP gateway.
response AS (
SELECT
resp,
block_number,
contract_address,
registry_id,
token_uri_link,
resp :data :attributes [0] :trait_type :: STRING AS trait_type,
resp :data :attributes [0] :value :: STRING AS trait_value,
REPLACE(
resp :data :code_uri :: STRING,
'ipfs://',
'https://gateway.autonolas.tech/ipfs/'
) AS code_uri_link,
resp :data :description :: STRING AS description,
CASE
WHEN resp :data :image :: STRING ILIKE 'ipfs://%' THEN REPLACE(
resp :data :image :: STRING,
'ipfs://',
'https://gateway.autonolas.tech/ipfs/'
)
WHEN resp :data :image :: STRING NOT ILIKE '%://%' THEN CONCAT(
'https://gateway.autonolas.tech/ipfs/',
resp :data :image :: STRING
)
ELSE resp :data :image :: STRING
END AS image_link,
resp :data :name :: STRING AS NAME,
_inserted_timestamp
FROM
uri_calls
)
-- Drop known gateway/API error responses so failed fetches stay NULL-named
-- in the target table and are retried on the next incremental run.
SELECT
resp,
block_number,
contract_address,
registry_id,
token_uri_link,
trait_type,
trait_value,
code_uri_link,
description,
image_link,
NAME,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['contract_address','registry_id']
) }} AS registry_metadata_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
response
WHERE
resp :: STRING NOT ILIKE '%merkledag: not found%'
AND resp :: STRING NOT ILIKE '%tuple index out of range%'
AND resp :: STRING NOT ILIKE '%"error":%'

View File

@ -0,0 +1,19 @@
# Schema tests for silver_olas__registry_metadata: surrogate-key uniqueness,
# not-null checks, and a hex-address format check on CONTRACT_ADDRESS.
version: 2
models:
- name: silver_olas__registry_metadata
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- REGISTRY_METADATA_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: REGISTRY_ID
tests:
- not_null

View File

@ -0,0 +1,35 @@
{{ config(
    materialized = 'view'
) }}
-- View joining the fetched IPFS registry metadata to the on-chain
-- getService agent ids for each service, keyed by registry id.
SELECT
    meta.name,
    meta.description,
    meta.registry_id,
    meta.contract_address,
    -- Label the known Service Registry contract; other contracts fall through to NULL.
    CASE meta.contract_address
        WHEN '0x9338b5153ae39bb89f50468e608ed9d764b755fd' THEN 'Service'
    END AS registry_type,
    meta.trait_type,
    meta.trait_value,
    meta.code_uri_link,
    meta.image_link,
    reads.agent_ids,
    meta.registry_metadata_id,
    meta.inserted_timestamp,
    -- Most recent modification across either source; COALESCE guards the
    -- NULL side of the left join with an epoch floor.
    GREATEST(
        COALESCE(
            meta.modified_timestamp,
            '1970-01-01' :: TIMESTAMP
        ),
        COALESCE(
            reads.modified_timestamp,
            '1970-01-01' :: TIMESTAMP
        )
    ) AS modified_timestamp
FROM
    {{ ref('silver_olas__registry_metadata') }} meta
    LEFT JOIN {{ ref('silver_olas__getservice_reads') }} reads
    ON meta.registry_id = reads.function_input

View File

@ -0,0 +1,16 @@
# Schema tests for the silver_olas__registry_metadata_complete view:
# surrogate-key uniqueness plus contract address/registry id checks.
version: 2
models:
- name: silver_olas__registry_metadata_complete
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- REGISTRY_METADATA_ID
columns:
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: REGISTRY_ID
tests:
- not_null

View File

@ -0,0 +1,121 @@
-- Incremental model: performs eth_call reads of tokenURI(serviceId) against the
-- OLAS Service Registry and decodes the returned metadata URI string.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'registry_reads_id',
full_refresh = false,
tags = ['curated']
) }}
-- Latest registration block per (contract, service id); incremental runs skip
-- pairs already read into this table.
WITH service_contracts AS (
SELECT
contract_address,
service_id AS registry_id,
MAX(block_number) AS block_number
FROM
{{ ref('silver_olas__service_registrations') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND CONCAT(
contract_address,
'-',
registry_id
) NOT IN (
SELECT
CONCAT(
contract_address,
'-',
function_input
)
FROM
{{ this }}
)
{% endif %}
GROUP BY
1,
2
),
-- 4-byte selector for tokenURI(uint256)
function_sigs AS (
SELECT
'0xc87b56dd' AS function_sig,
'tokenURI' AS function_name
),
-- Build the calldata: selector followed by the service id left-padded to one
-- 32-byte (64 hex char) word.
inputs AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
registry_id AS function_input,
CONCAT(
function_sig,
LPAD(
SUBSTR(utils.udf_int_to_hex(function_input), 3),
64,
0)
) AS DATA
FROM
service_contracts
JOIN function_sigs
ON 1 = 1
),
-- Execute each eth_call through the node API at the registration block.
contract_reads AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
function_input,
DATA,
utils.udf_json_rpc_call(
'eth_call',
[{ 'to': contract_address, 'from': null, 'data': data }, utils.udf_int_to_hex(block_number) ]
) AS rpc_request,
live.udf_api(
'POST',
CONCAT(
'{service}',
'/',
'{Authentication}'
),{},
rpc_request,
'Vault/prod/gnosis/quicknode/mainnet'
) AS read_output,
SYSDATE() AS _inserted_timestamp
FROM
inputs
)
-- Decode the ABI-encoded string return: SUBSTR(..., 131) skips '0x' plus the
-- 64-char offset word and 64-char length word (2 + 64 + 64 = 130 chars) to
-- reach the UTF-8 string payload.
SELECT
read_output,
read_output :data :id :: STRING AS read_id,
read_output :data :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
function_sig,
function_name,
function_input,
DATA,
utils.udf_hex_to_string(SUBSTR(read_result, 131)) AS token_uri_link,
contract_address,
block_number,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['contract_address','function_input']
) }} AS registry_reads_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
contract_reads
WHERE
token_uri_link IS NOT NULL
AND LENGTH(token_uri_link) <> 0

View File

@ -0,0 +1,19 @@
# Schema tests for silver_olas__registry_reads: surrogate-key uniqueness,
# not-null checks, and a hex-address format check on CONTRACT_ADDRESS.
version: 2
models:
- name: silver_olas__registry_reads
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- REGISTRY_READS_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_URI_LINK
tests:
- not_null

View File

@ -0,0 +1,53 @@
-- Incremental model: CreateMultisigWithAgents events from the OLAS Service
-- Registry, mapping each service id to its deployed multisig address.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
topics [0] :: STRING AS topic_0,
topics [1] :: STRING AS topic_1,
topics [2] :: STRING AS topic_2,
topics [3] :: STRING AS topic_3,
'CreateMultisigWithAgents' AS event_name,
DATA,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
-- indexed service id (topic 1)
TRY_TO_NUMBER(
utils.udf_hex_to_int(
topic_1
)
) AS id,
-- indexed multisig address: last 20 bytes (40 hex chars) of topic 2
CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS multisig_address,
_log_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['tx_hash','event_index']
) }} AS create_service_multisigs_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{{ ref('silver__logs') }}
WHERE
contract_address = '0x9338b5153ae39bb89f50468e608ed9d764b755fd' --Service Registry (AUTONOLAS-SERVICE-V1)
AND topic_0 = '0x2d53f895cd5faf3cddba94a25c2ced2105885b5b37450ff430ffa3cbdf332c74' --CreateMultisigWithAgents
AND tx_status = 'SUCCESS'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}

View File

@ -0,0 +1,24 @@
# Schema tests for silver_olas__create_service_multisigs: one row per log,
# with not-null and hex-format checks on decoded fields.
version: 2
models:
- name: silver_olas__create_service_multisigs
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: TX_HASH
tests:
- not_null
- name: MULTISIG_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ID
tests:
- not_null

View File

@ -0,0 +1,61 @@
-- Incremental model: Deliver events from the OLAS AgentMech contract, with a
-- gateway link to the delivered IPFS payload for each request id.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
topics [0] :: STRING AS topic_0,
topics [1] :: STRING AS topic_1,
topics [2] :: STRING AS topic_2,
topics [3] :: STRING AS topic_3,
'Deliver' AS event_name,
DATA,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
-- indexed sender address: last 20 bytes of topic 1
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS sender_address,
-- data word 0: request id
utils.udf_hex_to_int(
segmented_data [0] :: STRING
) AS request_id,
-- data word 3: IPFS content hash of the delivered payload
CONCAT(
'0x',
segmented_data [3] :: STRING
) AS data_payload,
-- Gateway URL; 'f01701220' is the CIDv1 multihash prefix prepended to the
-- raw 32-byte digest — NOTE(review): confirm this prefix matches the mech's
-- IPFS encoding for all deliveries.
CONCAT(
'https://gateway.autonolas.tech/ipfs/f01701220',
segmented_data [3] :: STRING,
'/',
request_id
) AS delivery_link,
_log_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['tx_hash','event_index']
) }} AS mech_delivers_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{{ ref('silver__logs') }}
WHERE
contract_address = '0x77af31de935740567cf4ff1986d04b2c964a786a' --AgentMech
AND topic_0 = '0x0cd979445339c62199996f208428d987b1cea24d18e62b79ec24d94b636e8b70' --Deliver
AND tx_status = 'SUCCESS'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}

View File

@ -0,0 +1,31 @@
# Schema tests for silver_olas__mech_delivers: one row per log, with not-null
# and hex-format checks on the decoded Deliver event fields.
version: 2
models:
- name: silver_olas__mech_delivers
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: TX_HASH
tests:
- not_null
- name: SENDER_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: REQUEST_ID
tests:
- not_null
- name: DATA_PAYLOAD
tests:
- not_null
- name: DELIVERY_LINK
tests:
- not_null

View File

@ -0,0 +1,60 @@
-- Incremental model: Request events from the OLAS AgentMech contract, with a
-- gateway link to each request's prompt metadata on IPFS.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
topics [0] :: STRING AS topic_0,
topics [1] :: STRING AS topic_1,
topics [2] :: STRING AS topic_2,
topics [3] :: STRING AS topic_3,
'Request' AS event_name,
DATA,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
-- indexed sender address: last 20 bytes of topic 1
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS sender_address,
-- data word 0: request id
utils.udf_hex_to_int(
segmented_data [0] :: STRING
) AS request_id,
-- data word 3: IPFS content hash of the request payload
CONCAT(
'0x',
segmented_data [3] :: STRING
) AS data_payload,
-- Gateway URL; same CIDv1 'f01701220' prefix convention as mech_delivers
CONCAT(
'https://gateway.autonolas.tech/ipfs/f01701220',
segmented_data [3] :: STRING,
'/metadata.json'
) AS prompt_link,
_log_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['tx_hash','event_index']
) }} AS mech_requests_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{{ ref('silver__logs') }}
WHERE
contract_address = '0x77af31de935740567cf4ff1986d04b2c964a786a' --AgentMech
AND topic_0 = '0x4bda649efe6b98b0f9c1d5e859c29e20910f45c66dabfe6fad4a4881f7faf9cc' --Request
AND tx_status = 'SUCCESS'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}

View File

@ -0,0 +1,31 @@
# Schema tests for silver_olas__mech_requests: one row per log, with not-null
# and hex-format checks on the decoded Request event fields.
version: 2
models:
- name: silver_olas__mech_requests
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: TX_HASH
tests:
- not_null
- name: SENDER_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: REQUEST_ID
tests:
- not_null
- name: DATA_PAYLOAD
tests:
- not_null
- name: PROMPT_LINK
tests:
- not_null

View File

@ -0,0 +1,209 @@
-- Incremental model: OLAS staking deposit and withdrawal events across the
-- Gnosis staking-program contracts (Alpha/Coastal/Alpine/Everest), unioned
-- into one stream and priced in USD via hourly OLAS prices.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
-- Deposit(sender, amount, balance, availableRewards) events
WITH deposits AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
topics [0] :: STRING AS topic_0,
topics [1] :: STRING AS topic_1,
topics [2] :: STRING AS topic_2,
topics [3] :: STRING AS topic_3,
'Deposit' AS event_name,
DATA,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
-- indexed depositor: last 20 bytes of topic 1
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS sender_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0]
)
) AS amount_unadj,
-- OLAS has 18 decimals
(amount_unadj / pow(10, 18)) :: FLOAT AS amount_adj,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1]
)
) AS balance_unadj,
(balance_unadj / pow(10, 18)) :: FLOAT AS balance_adj,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2]
)
) AS available_rewards_unadj,
(available_rewards_unadj / pow(10, 18)) :: FLOAT AS available_rewards_adj,
CASE
WHEN contract_address = '0xee9f19b5df06c7e8bfc7b28745dcf944c504198a' THEN 'Alpha'
WHEN contract_address = '0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237' THEN 'Coastal'
WHEN contract_address = '0x2ef503950be67a98746f484da0bbada339df3326' THEN 'Alpine'
WHEN contract_address = '0x5add592ce0a1b5dcecebb5dcac086cd9f9e3ea5c' THEN 'Everest'
END AS program_name,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__logs') }}
WHERE
contract_address IN (
'0xee9f19b5df06c7e8bfc7b28745dcf944c504198a',
--StakingProxy (Alpha)
'0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237',
--ServiceStakingTokenMechUsage (Coastal)
'0x2ef503950be67a98746f484da0bbada339df3326',
--ServiceStakingTokenMechUsage (Alpine)
'0x5add592ce0a1b5dcecebb5dcac086cd9f9e3ea5c' --ServiceStakingTokenMechUsage (Everest)
)
AND topic_0 = '0x36af321ec8d3c75236829c5317affd40ddb308863a1236d2d277a4025cccee1e' --Deposit (erc20)
AND tx_status = 'SUCCESS'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
-- Withdraw(withdrawer, amount) events
withdrawals AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
topics [0] :: STRING AS topic_0,
topics [1] :: STRING AS topic_1,
topics [2] :: STRING AS topic_2,
topics [3] :: STRING AS topic_3,
-- fix: these rows are Withdraw events (see topic_0 filter below); they were
-- previously mislabeled 'Deposit' via copy-paste from the deposits CTE
'Withdraw' AS event_name,
DATA,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS withdrawer_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0]
)
) AS amount_unadj,
(amount_unadj / pow(10, 18)) :: FLOAT AS amount_adj,
CASE
WHEN contract_address = '0xee9f19b5df06c7e8bfc7b28745dcf944c504198a' THEN 'Alpha'
WHEN contract_address = '0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237' THEN 'Coastal'
WHEN contract_address = '0x2ef503950be67a98746f484da0bbada339df3326' THEN 'Alpine'
WHEN contract_address = '0x5add592ce0a1b5dcecebb5dcac086cd9f9e3ea5c' THEN 'Everest'
END AS program_name,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__logs') }}
WHERE
contract_address IN (
'0xee9f19b5df06c7e8bfc7b28745dcf944c504198a',
--StakingProxy (Alpha)
'0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237',
--ServiceStakingTokenMechUsage (Coastal)
'0x2ef503950be67a98746f484da0bbada339df3326',
--ServiceStakingTokenMechUsage (Alpine)
'0x5add592ce0a1b5dcecebb5dcac086cd9f9e3ea5c' --ServiceStakingTokenMechUsage (Everest)
)
AND topic_0 = '0x884edad9ce6fa2440d8a54cc123490eb96d2768479d49ff9c7366125a9424364' --Withdraw (erc20)
AND tx_status = 'SUCCESS'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
-- Combine both flows into a single event stream with a common staker column
all_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_name,
sender_address AS staker_address,
amount_unadj,
amount_adj,
program_name,
_log_id,
_inserted_timestamp
FROM
deposits
UNION ALL
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_name,
withdrawer_address AS staker_address,
amount_unadj,
amount_adj,
program_name,
_log_id,
_inserted_timestamp
FROM
withdrawals
)
-- Price each event in USD using the OLAS hourly price at the event's hour;
-- amount_usd is NULL when no price row exists for that hour.
SELECT
b.block_number,
b.block_timestamp,
b.tx_hash,
b.origin_function_signature,
b.origin_from_address,
b.origin_to_address,
b.contract_address,
b.event_index,
b.event_name,
b.staker_address,
b.amount_unadj,
b.amount_adj AS amount,
ROUND(
b.amount_adj * p.price,
2
) AS amount_usd,
'OLAS' AS token_symbol,
'0xce11e14225575945b8e6dc0d4f2dd4c570f79d9f' AS token_address,
b.program_name,
b._log_id,
b._inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['tx_hash','event_index']
) }} AS olas_staking_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
all_evt b
LEFT JOIN {{ ref('price__ez_prices_hourly') }}
p
ON p.token_address = '0xce11e14225575945b8e6dc0d4f2dd4c570f79d9f'
AND DATE_TRUNC(
'hour',
b.block_timestamp
) = p.hour

View File

@ -0,0 +1,28 @@
# Schema tests for silver_olas__olas_staking: one row per log, with not-null
# and hex-format checks on staking event fields.
version: 2
models:
- name: silver_olas__olas_staking
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: TX_HASH
tests:
- not_null
- name: STAKER_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT
tests:
- not_null
- name: PROGRAM_NAME
tests:
- not_null

View File

@ -0,0 +1,254 @@
-- Incremental model: Checkpoint events from the OLAS staking-program contracts.
-- Three ABI variants of the event exist (distinguished by topic_0); each is
-- normalized to a common shape, then the per-service rewards arrays are
-- flattened into one row per (checkpoint, service id).
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
-- Decoded Checkpoint logs from all four staking program contracts
WITH decoded_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
event_name,
topics [0] :: STRING AS topic_0,
topics [1] :: STRING AS topic_1,
topics [2] :: STRING AS topic_2,
topics [3] :: STRING AS topic_3,
decoded_flat,
CASE
WHEN contract_address = '0xee9f19b5df06c7e8bfc7b28745dcf944c504198a' THEN 'Alpha'
WHEN contract_address = '0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237' THEN 'Coastal'
WHEN contract_address = '0x2ef503950be67a98746f484da0bbada339df3326' THEN 'Alpine'
WHEN contract_address = '0x5add592ce0a1b5dcecebb5dcac086cd9f9e3ea5c' THEN 'Everest'
END AS program_name,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
contract_address IN (
'0xee9f19b5df06c7e8bfc7b28745dcf944c504198a',
-- StakingProxy (Alpha)
'0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237',
--ServiceStakingTokenMechUsage (Coastal)
'0x2ef503950be67a98746f484da0bbada339df3326',
--ServiceStakingTokenMechUsage (Alpine)
'0x5add592ce0a1b5dcecebb5dcac086cd9f9e3ea5c' --ServiceStakingTokenMechUsage (Everest)
)
AND topic_0 IN (
'0x48b735a18ed32318d316214e41387be29c52e29df4598f2b8e40fa843be3f940',
'0x06a98bdd4732811ab3214800ed1ada2dce66a2bce301d250c3ca7d6b461ee666',
'0x21d81d5d656869e8ce3ba8d65526a2f0dbbcd3d36f5f9999eb7c84360e45eced'
) --Checkpoint
AND tx_status = 'SUCCESS'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
-- Variant 1: carries epoch, epochLength, availableRewards, rewards[], serviceIds[]
checkpoint_type1 AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
event_name,
topic_0,
topic_1,
topic_2,
topic_3,
decoded_flat,
TRY_TO_NUMBER(
decoded_flat :epoch :: STRING
) AS epoch,
TRY_TO_NUMBER(
decoded_flat :epochLength :: STRING
) AS epoch_length,
TRY_TO_NUMBER(
decoded_flat :availableRewards :: STRING
) AS available_rewards_unadj,
(available_rewards_unadj / pow(10, 18)) :: FLOAT AS available_rewards_adj,
decoded_flat :rewards AS rewards,
decoded_flat :serviceIds AS service_ids,
ARRAY_SIZE(service_ids) AS num_services,
program_name,
_log_id,
_inserted_timestamp
FROM
decoded_evt
WHERE
topic_0 = '0x48b735a18ed32318d316214e41387be29c52e29df4598f2b8e40fa843be3f940'
),
-- Variant 2: no epochLength in the payload
checkpoint_type2 AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
event_name,
topic_0,
topic_1,
topic_2,
topic_3,
decoded_flat,
TRY_TO_NUMBER(
decoded_flat :epoch :: STRING
) AS epoch,
NULL AS epoch_length,
TRY_TO_NUMBER(
decoded_flat :availableRewards :: STRING
) AS available_rewards_unadj,
(available_rewards_unadj / pow(10, 18)) :: FLOAT AS available_rewards_adj,
decoded_flat :rewards AS rewards,
decoded_flat :serviceIds AS service_ids,
ARRAY_SIZE(service_ids) AS num_services,
program_name,
_log_id,
_inserted_timestamp
FROM
decoded_evt
WHERE
topic_0 = '0x06a98bdd4732811ab3214800ed1ada2dce66a2bce301d250c3ca7d6b461ee666'
),
-- Variant 3: only availableRewards and a numServices scalar — no per-service
-- arrays, so these rows produce no flattened output below
checkpoint_type3 AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
event_name,
topic_0,
topic_1,
topic_2,
topic_3,
decoded_flat,
NULL AS epoch,
NULL AS epoch_length,
TRY_TO_NUMBER(
decoded_flat :availableRewards :: STRING
) AS available_rewards_unadj,
(available_rewards_unadj / pow(10, 18)) :: FLOAT AS available_rewards_adj,
NULL AS rewards,
NULL AS service_ids,
TRY_TO_NUMBER(
decoded_flat :numServices :: STRING
) AS num_services,
program_name,
_log_id,
_inserted_timestamp
FROM
decoded_evt
WHERE
topic_0 = '0x21d81d5d656869e8ce3ba8d65526a2f0dbbcd3d36f5f9999eb7c84360e45eced'
),
all_checkpoints AS (
SELECT
*
FROM
checkpoint_type1
UNION ALL
SELECT
*
FROM
checkpoint_type2
UNION ALL
SELECT
*
FROM
checkpoint_type3
),
-- Pair serviceIds[i] with rewards[i]: the two LATERAL FLATTENs form a cross
-- product that is reduced to positional pairs by matching flatten indexes.
evt_flat AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
event_name,
epoch,
epoch_length,
available_rewards_unadj,
available_rewards_adj,
rewards,
service_ids,
num_services,
TRY_TO_NUMBER(
f1.value :: STRING
) AS service_id,
TRY_TO_NUMBER(
f2.value :: STRING
) AS reward_unadj,
(reward_unadj / pow(10, 18)) :: FLOAT AS reward_adj,
program_name,
_log_id,
_inserted_timestamp
FROM
all_checkpoints,
LATERAL FLATTEN(
input => service_ids
) AS f1,
LATERAL FLATTEN(
input => rewards
) AS f2
WHERE
f1.index = f2.index
)
-- One row per (checkpoint event, service id)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
event_name,
epoch,
epoch_length,
available_rewards_unadj,
available_rewards_adj,
service_id,
reward_unadj,
reward_adj,
num_services,
program_name,
rewards,
service_ids,
_log_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['tx_hash','event_index','service_id']
) }} AS service_checkpoint_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
evt_flat

View File

@ -0,0 +1,29 @@
# Schema tests for silver_olas__service_checkpoint: one row per
# (checkpoint, service id), plus not-null checks on decoded fields.
version: 2
models:
- name: silver_olas__service_checkpoint
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- SERVICE_CHECKPOINT_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: TX_HASH
tests:
- not_null
- name: EPOCH
tests:
- not_null
- name: SERVICE_ID
tests:
- not_null
- name: REWARD_ADJ
tests:
- not_null
- name: PROGRAM_NAME
tests:
- not_null

View File

@ -0,0 +1,57 @@
-- Incremental model: all event logs from transactions whose origin_to_address
-- is a known OLAS service multisig, tagged with the owning service id.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
-- Distinct (multisig, service id) pairs from CreateMultisigWithAgents events
WITH service_multisigs AS (
SELECT
DISTINCT multisig_address,
id
FROM
{{ ref('silver_olas__create_service_multisigs') }}
)
SELECT
d.block_number,
d.block_timestamp,
d.tx_hash,
d.origin_function_signature,
d.origin_from_address,
d.origin_to_address,
d.contract_address,
d.event_index,
s.multisig_address,
s.id AS service_id,
d.topics [0] :: STRING AS topic_0,
d.topics [1] :: STRING AS topic_1,
d.topics [2] :: STRING AS topic_2,
d.topics [3] :: STRING AS topic_3,
d.data,
d.regexp_substr_all(SUBSTR(d.data, 3, len(d.data)), '.{64}') AS segmented_data,
d._log_id,
d._inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['d.tx_hash','d.event_index']
) }} AS service_event_logs_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
{{ ref('silver__logs') }}
d
-- INNER JOIN keeps only logs from txs submitted to a service multisig
INNER JOIN service_multisigs s
ON d.origin_to_address = s.multisig_address
WHERE
d.tx_status = 'SUCCESS'
{% if is_incremental() %}
AND d._inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}

View File

@ -0,0 +1,28 @@
# Schema tests for silver_olas__service_event_logs: one row per log, with
# not-null and hex-format checks on the joined multisig/service fields.
version: 2
models:
- name: silver_olas__service_event_logs
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: TX_HASH
tests:
- not_null
- name: MULTISIG_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SERVICE_ID
tests:
- not_null
- name: DATA
tests:
- not_null

View File

@ -0,0 +1,158 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'block_number',
    cluster_by = ['block_timestamp::DATE'],
    tags = ['curated','reorg']
) }}
-- OLAS service registrations on the Service Registry contract: each
-- CreateService event enriched with the owner (from the paired ERC-721
-- Transfer in the same tx) and the service's latest known multisig address.
WITH registry_evt AS (
    -- CreateService + Transfer logs from the Service Registry contract;
    -- both event types are pulled in one scan and split by event_name below
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_index,
        topics [0] :: STRING AS topic_0,
        topics [1] :: STRING AS topic_1,
        topics [2] :: STRING AS topic_2,
        topics [3] :: STRING AS topic_3,
        CASE
            WHEN topic_0 = '0xb34c1e02384201736eb4693b9b173306cb41bff12f15894dea5773088e9a3b1c' THEN 'CreateService'
            WHEN topic_0 = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' THEN 'Transfer'
        END AS event_name,
        DATA,
        -- raw hex payload split into 32-byte words for decoding
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        _log_id,
        _inserted_timestamp
    FROM
        {{ ref('silver__logs') }}
    WHERE
        contract_address = '0x9338b5153ae39bb89f50468e608ed9d764b755fd' --Service Registry (AUTONOLAS-SERVICE-V1)
        AND topic_0 IN (
            '0xb34c1e02384201736eb4693b9b173306cb41bff12f15894dea5773088e9a3b1c',
            --CreateService (for services)
            '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' --Transfer
        )
        AND tx_status = 'SUCCESS'

{% if is_incremental() %}
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
{% endif %}
),
transfers AS (
    -- ERC-721 Transfer decoding: from/to addresses are the last 20 bytes of
    -- topics 1/2, and the token id (= service id) is topic 3
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_index,
        topic_0,
        topic_1,
        topic_2,
        topic_3,
        event_name,
        DATA,
        segmented_data,
        CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS from_address,
        CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS to_address,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                topic_3
            )
        ) AS id,
        _log_id,
        _inserted_timestamp
    FROM
        registry_evt
    WHERE
        event_name = 'Transfer'
),
multisigs AS (
    -- one row per multisig: QUALIFY runs before DISTINCT, so the window
    -- filter below is what actually enforces uniqueness per multisig_address
    SELECT
        DISTINCT multisig_address,
        id,
        contract_address
    FROM
        {{ ref('silver_olas__create_service_multisigs') }}

qualify(ROW_NUMBER() over (PARTITION BY multisig_address
ORDER BY
    block_timestamp DESC)) = 1 --get latest service multisig address
),
services AS (
    -- CreateService events joined to the same-tx Transfer (owner) and to the
    -- latest multisig for the service; both joins are LEFT so registrations
    -- survive even when the pair is missing
    SELECT
        r.block_number,
        r.block_timestamp,
        r.tx_hash,
        r.origin_function_signature,
        r.origin_from_address,
        r.origin_to_address,
        r.contract_address,
        r.event_index,
        r.topic_0,
        r.topic_1,
        r.topic_2,
        r.topic_3,
        r.event_name,
        r.data,
        r.segmented_data,
        -- service id is indexed in topic 1 of CreateService
        TRY_TO_NUMBER(utils.udf_hex_to_int(r.topic_1)) AS service_id,
        CONCAT(
            '0x',
            r.segmented_data [0] :: STRING
        ) AS config_hash,
        t.from_address,
        t.to_address AS owner_address,
        m.multisig_address,
        r._log_id,
        r._inserted_timestamp
    FROM
        registry_evt r
        LEFT JOIN transfers t
        ON r.tx_hash = t.tx_hash
        AND r.contract_address = t.contract_address
        AND service_id = t.id
        LEFT JOIN multisigs m
        ON r.contract_address = m.contract_address
        AND service_id = m.id
    WHERE
        r.event_name = 'CreateService'
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    contract_address,
    event_index,
    event_name,
    owner_address,
    multisig_address,
    service_id,
    config_hash,
    _log_id,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(
        ['tx_hash','event_index']
    ) }} AS service_registration_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    -- keep the latest version of each log row (reorg / reload protection)
    services qualify(ROW_NUMBER() over (PARTITION BY _log_id
ORDER BY
    _inserted_timestamp DESC)) = 1

View File

@ -0,0 +1,29 @@
version: 2

models:
  # Schema tests for OLAS service registrations (CreateService events).
  - name: silver_olas__service_registrations
    tests:
      # one row per on-chain log record
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: TX_HASH
        tests:
          - not_null
      - name: OWNER_ADDRESS
        tests:
          - not_null
          # NOTE(review): unanchored pattern — matches any hex-containing value
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      # multisig may be NULL (LEFT JOIN in the model), so no not_null test here
      - name: MULTISIG_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SERVICE_ID
        tests:
          - not_null

View File

@ -0,0 +1,346 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'block_number',
    cluster_by = ['block_timestamp::DATE'],
    tags = ['curated','reorg']
) }}
-- ServiceStaked / ServiceUnstaked events across the OLAS staking programs.
-- The Everest contract emits a different event signature with a different
-- data layout, so it gets its own pair of CTEs; all four are UNION ALL'd at
-- the end into one stake/unstake activity stream.
WITH stake AS (
    -- ServiceStaked on the Alpha / Coastal / Alpine staking contracts
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_index,
        topics [0] :: STRING AS topic_0,
        topics [1] :: STRING AS topic_1,
        topics [2] :: STRING AS topic_2,
        topics [3] :: STRING AS topic_3,
        'ServiceStaked' AS event_name,
        DATA,
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                topic_1
            )
        ) AS service_id,
        CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS owner_address,
        CONCAT('0x', SUBSTR(topic_3, 27, 40)) AS multisig_address,
        -- NOTE(review): epoch is not wrapped in TRY_TO_NUMBER like the other
        -- numeric fields, so it surfaces as a string — confirm intended type
        utils.udf_hex_to_int(
            segmented_data [0] :: STRING
        ) AS epoch,
        -- data words 3 and 4 carry the multisig nonces for this layout
        ARRAY_CONSTRUCT(
            TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [3] :: STRING)),
            TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [4] :: STRING))
        ) AS nonces,
        CASE
            WHEN contract_address = '0xee9f19b5df06c7e8bfc7b28745dcf944c504198a' THEN 'Alpha'
            WHEN contract_address = '0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237' THEN 'Coastal'
            WHEN contract_address = '0x2ef503950be67a98746f484da0bbada339df3326' THEN 'Alpine'
        END AS program_name,
        _log_id,
        _inserted_timestamp
    FROM
        {{ ref('silver__logs') }}
    WHERE
        contract_address IN (
            '0xee9f19b5df06c7e8bfc7b28745dcf944c504198a',
            --StakingProxy (Alpha)
            '0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237',
            --ServiceStakingTokenMechUsage (Coastal)
            '0x2ef503950be67a98746f484da0bbada339df3326' --ServiceStakingTokenMechUsage (Alpine)
        )
        AND topic_0 = '0xaa6b005b4958114a0c90492461c24af6525ae0178db7fbf44125ae9217c69ccb' --ServiceStaked
        AND tx_status = 'SUCCESS'

{% if is_incremental() %}
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
{% endif %}
),
stake_everest AS (
    -- ServiceStaked on the Everest contract: different topic_0 signature and
    -- the nonces sit in data words 0/1; the event carries no epoch
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_index,
        topics [0] :: STRING AS topic_0,
        topics [1] :: STRING AS topic_1,
        topics [2] :: STRING AS topic_2,
        topics [3] :: STRING AS topic_3,
        'ServiceStaked' AS event_name,
        DATA,
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                topic_1
            )
        ) AS service_id,
        CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS owner_address,
        CONCAT('0x', SUBSTR(topic_3, 27, 40)) AS multisig_address,
        NULL AS epoch,
        ARRAY_CONSTRUCT(
            TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [0] :: STRING)),
            TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [1] :: STRING))
        ) AS nonces,
        'Everest' AS program_name,
        _log_id,
        _inserted_timestamp
    FROM
        {{ ref('silver__logs') }}
    WHERE
        contract_address = '0x5add592ce0a1b5dcecebb5dcac086cd9f9e3ea5c' --ServiceStakingTokenMechUsage (Everest)
        AND topic_0 = '0x5d43ac9b1b213902df90d405b0006308578486b6c62182c5df202ed572c844e4' --ServiceStaked
        AND tx_status = 'SUCCESS'

{% if is_incremental() %}
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
{% endif %}
),
unstake AS (
    -- ServiceUnstaked on Alpha / Coastal / Alpine
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_index,
        topics [0] :: STRING AS topic_0,
        topics [1] :: STRING AS topic_1,
        topics [2] :: STRING AS topic_2,
        topics [3] :: STRING AS topic_3,
        'ServiceUnstaked' AS event_name,
        DATA,
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                topic_1
            )
        ) AS service_id,
        CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS owner_address,
        CONCAT('0x', SUBSTR(topic_3, 27, 40)) AS multisig_address,
        -- NOTE(review): string-typed like epoch in the stake CTE — see note there
        utils.udf_hex_to_int(
            segmented_data [0] :: STRING
        ) AS epoch,
        -- NOTE(review): reward and ts_start are decoded here but are NOT
        -- carried through all_evt into the final output — confirm if dropping
        -- them is intentional or an oversight
        utils.udf_hex_to_int(
            segmented_data [2] :: STRING
        ) AS reward,
        NULL AS ts_start,
        ARRAY_CONSTRUCT(
            TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [4] :: STRING)),
            TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [5] :: STRING))
        ) AS nonces,
        CASE
            WHEN contract_address = '0xee9f19b5df06c7e8bfc7b28745dcf944c504198a' THEN 'Alpha'
            WHEN contract_address = '0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237' THEN 'Coastal'
            WHEN contract_address = '0x2ef503950be67a98746f484da0bbada339df3326' THEN 'Alpine'
        END AS program_name,
        _log_id,
        _inserted_timestamp
    FROM
        {{ ref('silver__logs') }}
    WHERE
        contract_address IN (
            '0xee9f19b5df06c7e8bfc7b28745dcf944c504198a',
            --StakingProxy (Alpha)
            '0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237',
            --ServiceStakingTokenMechUsage (Coastal)
            '0x2ef503950be67a98746f484da0bbada339df3326' --ServiceStakingTokenMechUsage (Alpine)
        )
        AND topic_0 = '0x950733f4c0bf951b8e770f3cc619a4288e7b59b1236d59aeaf2c238488e8ae81' --ServiceUnstaked
        AND tx_status = 'SUCCESS'

{% if is_incremental() %}
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
{% endif %}
),
unstake_everest AS (
    -- ServiceUnstaked on Everest: no epoch; reward / ts_start at data words 1/2
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_index,
        topics [0] :: STRING AS topic_0,
        topics [1] :: STRING AS topic_1,
        topics [2] :: STRING AS topic_2,
        topics [3] :: STRING AS topic_3,
        'ServiceUnstaked' AS event_name,
        DATA,
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                topic_1
            )
        ) AS service_id,
        CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS owner_address,
        CONCAT('0x', SUBSTR(topic_3, 27, 40)) AS multisig_address,
        NULL AS epoch,
        -- NOTE(review): decoded but dropped downstream — see note in unstake
        utils.udf_hex_to_int(
            segmented_data [1] :: STRING
        ) AS reward,
        utils.udf_hex_to_int(
            segmented_data [2] :: STRING
        ) AS ts_start,
        ARRAY_CONSTRUCT(
            TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [4] :: STRING)),
            TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [5] :: STRING))
        ) AS nonces,
        'Everest' AS program_name,
        _log_id,
        _inserted_timestamp
    FROM
        {{ ref('silver__logs') }}
    WHERE
        contract_address = '0x5add592ce0a1b5dcecebb5dcac086cd9f9e3ea5c' --ServiceStakingTokenMechUsage (Everest)
        AND topic_0 = '0x246ee6115bfd84e00097b16569c2ff2f822026bb9595a82cd2c1e69d4b6ea50c' --ServiceUnstaked
        AND tx_status = 'SUCCESS'

{% if is_incremental() %}
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
{% endif %}
),
all_evt AS (
    -- union of all four sources on a common column list; reward / ts_start
    -- from the unstake CTEs are intentionally not projected here
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_name,
        service_id,
        owner_address,
        multisig_address,
        epoch,
        nonces,
        program_name,
        _log_id,
        _inserted_timestamp
    FROM
        stake
    UNION ALL
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_name,
        service_id,
        owner_address,
        multisig_address,
        epoch,
        nonces,
        program_name,
        _log_id,
        _inserted_timestamp
    FROM
        stake_everest
    UNION ALL
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_name,
        service_id,
        owner_address,
        multisig_address,
        epoch,
        nonces,
        program_name,
        _log_id,
        _inserted_timestamp
    FROM
        unstake
    UNION ALL
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_name,
        service_id,
        owner_address,
        multisig_address,
        epoch,
        nonces,
        program_name,
        _log_id,
        _inserted_timestamp
    FROM
        unstake_everest
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    event_index,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    contract_address,
    event_name,
    service_id,
    owner_address,
    multisig_address,
    epoch,
    nonces,
    program_name,
    _log_id,
    _inserted_timestamp,
    {{ dbt_utils.generate_surrogate_key(
        ['tx_hash','event_index']
    ) }} AS service_staking_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    all_evt

View File

@ -0,0 +1,32 @@
version: 2

models:
  # Schema tests for OLAS staking / unstaking activity.
  - name: silver_olas__service_staking
    tests:
      # one row per on-chain log record
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: TX_HASH
        tests:
          - not_null
      - name: OWNER_ADDRESS
        tests:
          - not_null
          # NOTE(review): unanchored pattern — matches any hex-containing value
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: MULTISIG_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SERVICE_ID
        tests:
          - not_null
      - name: PROGRAM_NAME
        tests:
          - not_null

View File

@ -0,0 +1,144 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'block_number',
    cluster_by = ['block_timestamp::DATE'],
    tags = ['curated','reorg']
) }}
-- ServicesEvicted events from the OLAS staking contracts, exploded from one
-- event-per-batch (arrays of services) to one row per evicted service.
WITH decoded_evt AS (
    -- decoded ServicesEvicted logs; the decoded payload carries parallel
    -- arrays (serviceIds, multisigs, owners, serviceInactivity) plus the epoch
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_index,
        event_name,
        topics [0] :: STRING AS topic_0,
        topics [1] :: STRING AS topic_1,
        topics [2] :: STRING AS topic_2,
        topics [3] :: STRING AS topic_3,
        decoded_flat,
        TRY_TO_NUMBER(
            decoded_flat :epoch :: STRING
        ) AS epoch,
        decoded_flat :multisigs AS multisigs,
        decoded_flat :owners AS owners,
        decoded_flat :serviceIds AS service_ids,
        ARRAY_SIZE(service_ids) AS num_services,
        decoded_flat :serviceInactivity AS service_inactivities,
        CASE
            WHEN contract_address = '0xee9f19b5df06c7e8bfc7b28745dcf944c504198a' THEN 'Alpha'
            WHEN contract_address = '0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237' THEN 'Coastal'
            WHEN contract_address = '0x2ef503950be67a98746f484da0bbada339df3326' THEN 'Alpine'
        END AS program_name,
        _log_id,
        _inserted_timestamp
    FROM
        {{ ref('silver__decoded_logs') }}
    WHERE
        contract_address IN (
            '0xee9f19b5df06c7e8bfc7b28745dcf944c504198a',
            -- StakingProxy (Alpha)
            '0x43fb32f25dce34eb76c78c7a42c8f40f84bcd237',
            --ServiceStakingTokenMechUsage (Coastal)
            '0x2ef503950be67a98746f484da0bbada339df3326' --ServiceStakingTokenMechUsage (Alpine)
        )
        AND topic_0 = '0xd19a3d42ed383465e4058c322d9411aeac76ddb8454d22e139fc99808bd56952' --ServicesEvicted
        AND tx_status = 'SUCCESS'

{% if is_incremental() %}
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
{% endif %}
),
evt_flat AS (
    -- explode the four parallel arrays; the index-equality predicates in the
    -- WHERE clause re-align the cross-joined flattens so element i of each
    -- array pairs with element i of the others (one output row per service)
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        event_index,
        event_name,
        multisigs,
        owners,
        service_ids,
        num_services,
        service_inactivities,
        epoch,
        TRY_TO_NUMBER(
            f1.value :: STRING
        ) AS service_id,
        LOWER(
            f2.value :: STRING
        ) AS multisig_address,
        LOWER(
            f3.value :: STRING
        ) AS owner_address,
        TRY_TO_NUMBER(
            f4.value :: STRING
        ) AS service_inactivity,
        program_name,
        _log_id,
        _inserted_timestamp
    FROM
        decoded_evt,
        LATERAL FLATTEN(
            input => service_ids
        ) AS f1,
        LATERAL FLATTEN(
            input => multisigs
        ) AS f2,
        LATERAL FLATTEN(
            input => owners
        ) AS f3,
        LATERAL FLATTEN(
            input => service_inactivities
        ) AS f4
    WHERE
        f1.index = f2.index
        AND f2.index = f3.index
        AND f3.index = f4.index
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    contract_address,
    event_index,
    event_name,
    epoch,
    service_id,
    owner_address,
    multisig_address,
    service_inactivity,
    program_name,
    num_services,
    multisigs,
    owners,
    service_ids,
    service_inactivities,
    _log_id,
    _inserted_timestamp,
    -- service_id is part of the key because one log fans out to many rows
    {{ dbt_utils.generate_surrogate_key(
        ['tx_hash','event_index','service_id']
    ) }} AS services_evicted_id,
    SYSDATE() AS inserted_timestamp,
    SYSDATE() AS modified_timestamp,
    '{{ invocation_id }}' AS _invocation_id
FROM
    evt_flat

View File

@ -0,0 +1,39 @@
version: 2

models:
  # Schema tests for evicted services (one row per service per eviction event,
  # hence uniqueness on the surrogate key rather than _LOG_ID).
  - name: silver_olas__services_evicted
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - SERVICES_EVICTED_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: TX_HASH
        tests:
          - not_null
      - name: OWNER_ADDRESS
        tests:
          - not_null
          # NOTE(review): unanchored pattern — matches any hex-containing value
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: MULTISIG_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SERVICE_ID
        tests:
          - not_null
      - name: EPOCH
        tests:
          - not_null
      - name: SERVICE_INACTIVITY
        tests:
          - not_null
      - name: PROGRAM_NAME
        tests:
          - not_null