WIP bridge models

This commit is contained in:
drethereum 2024-12-10 11:14:25 -07:00
parent cfa96bee57
commit 91177eb2d5
9 changed files with 959 additions and 6 deletions

View File

@ -0,0 +1,239 @@
-- Axelar ContractCallWithToken bridge events (WIP model).
-- Decodes gateway events and joins the matching token transfer/burn in the
-- same transaction to recover the bridged token address.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
-- ContractCallWithToken events emitted by the Axelar gateway contract.
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'axelar' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
decoded_flat AS decoded_log,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
decoded_log :"destinationChain" :: STRING AS destinationChain,
LOWER(
decoded_log :"destinationContractAddress" :: STRING
) AS destinationContractAddress,
decoded_log :"payload" :: STRING AS payload,
-- NOTE(review): recipient is hardcoded to the tx origin, so the
-- '0x0000...' fallback to refundAddress in FINAL can presumably
-- never fire -- confirm intent.
origin_from_address AS recipient,
decoded_log :"payloadHash" :: STRING AS payloadHash,
decoded_log :"sender" :: STRING AS sender,
decoded_log :"symbol" :: STRING AS symbol,
decoded_log,
event_removed,
CASE
WHEN tx_status = 'success' THEN TRUE
ELSE FALSE
END AS tx_succeeded,
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
modified_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
-- ContractCallWithToken event signature
topics [0] :: STRING = '0x7e50569d26be643bda7757722291ec66b1be66d8283474ae3fab5a98f878a7a2'
AND contract_address = LOWER('0xe432150cce91c13a887f7D836923d5597adD8E31')
AND tx_succeeded
{% if is_incremental() %}
-- standard lookback: 12h past the high-water mark, capped at 7 days
AND modified_timestamp >= (
SELECT
MAX(modified_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
-- NativeGasPaidForContractCall events from the Axelar gas service;
-- only refundAddress is consumed in FINAL (as a receiver fallback).
native_gas_paid AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'axelar' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
decoded_flat AS decoded_log,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
decoded_log :"destinationChain" :: STRING AS destinationChain,
LOWER(
decoded_log :"destinationAddress" :: STRING
) AS destinationAddress,
TRY_TO_NUMBER(
decoded_log :"gasFeeAmount" :: STRING
) AS gasFeeAmount,
decoded_log :"payloadHash" :: STRING AS payloadHash,
decoded_log :"refundAddress" :: STRING AS refundAddress,
decoded_log :"sourceAddress" :: STRING AS sourceAddress,
decoded_log :"symbol" :: STRING AS symbol,
decoded_log,
event_removed,
CASE
WHEN tx_status = 'success' THEN TRUE
ELSE FALSE
END AS tx_succeeded,
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
modified_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
-- NativeGasPaidForContractCall event signature
topics [0] :: STRING = '0x999d431b58761213cf53af96262b67a069cbd963499fd8effd1e21556217b841'
AND contract_address = '0x2d5d7d31f671f86c782533cc367f14109a082712'
AND tx_succeeded
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(modified_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
-- Token transfers from the hardcoded depositor into the gateway or the
-- zero address (burn); used to recover the bridged token contract.
transfers AS (
SELECT
block_number,
tx_hash,
event_index,
contract_address AS token_address,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__transfers') }}
WHERE
from_address = '0x492751ec3c57141deb205ec2da8bfcb410738630'
AND to_address IN (
LOWER('0xe432150cce91c13a887f7D836923d5597adD8E31'),
'0x0000000000000000000000000000000000000000'
)
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
-- NOTE(review): both joins below match on block_number + tx_hash only
-- (not event_index), so a tx with multiple bridge events / transfers
-- fans out and the final QUALIFY keeps an arbitrary match per _log_id.
-- TODO confirm this cannot attribute the wrong token_address/refundAddress.
FINAL AS (
SELECT
b.block_number,
b.block_timestamp,
b.origin_function_signature,
b.origin_from_address,
b.origin_to_address,
b.tx_hash,
b.event_index,
b.topic_0,
b.event_name,
b.event_removed,
b.tx_succeeded,
b.contract_address AS bridge_address,
b.name AS platform,
b.origin_from_address AS sender,
CASE
WHEN b.recipient = '0x0000000000000000000000000000000000000000' THEN refundAddress
ELSE b.recipient
END AS receiver,
-- map Axelar chain labels to the canonical chain names used downstream
CASE
WHEN LOWER(
b.destinationChain
) = 'avalanche' THEN 'avalanche c-chain'
WHEN LOWER(
b.destinationChain
) = 'binance' THEN 'bnb smart chain mainnet'
WHEN LOWER(
b.destinationChain
) = 'celo' THEN 'celo mainnet'
WHEN LOWER(
b.destinationChain
) = 'ethereum' THEN 'ethereum mainnet'
WHEN LOWER(
b.destinationChain
) = 'fantom' THEN 'fantom opera'
WHEN LOWER(
b.destinationChain
) = 'polygon' THEN 'polygon mainnet'
ELSE LOWER(
b.destinationChain
)
END AS destination_chain,
b.destinationContractAddress AS destination_contract_address,
-- EVM-style destination chains receive at the user address; otherwise
-- fall back to the destination contract address
CASE
WHEN destination_chain IN (
'arbitrum',
'avalanche c-chain',
'base',
'bnb smart chain mainnet',
'celo mainnet',
'centrifuge',
'ethereum mainnet',
'fantom opera',
'filecoin',
'fraxtal',
'immutable',
'kava',
'linea',
'mantle',
'moonbeam',
'neutron',
'optimism',
'osmosis',
'polygon mainnet',
'scroll'
) THEN receiver
ELSE destination_contract_address
END AS destination_chain_receiver,
b.amount,
b.payload,
b.payloadHash AS payload_hash,
b.symbol AS token_symbol,
t.token_address,
b._log_id,
b.modified_timestamp
FROM
base_evt b
INNER JOIN transfers t
ON b.block_number = t.block_number
AND b.tx_hash = t.tx_hash
LEFT JOIN native_gas_paid n
ON n.block_number = b.block_number
AND n.tx_hash = b.tx_hash
)
SELECT
*
FROM
-- dedup join fan-out: keep the most recently modified row per event
FINAL qualify (ROW_NUMBER() over (PARTITION BY _log_id
ORDER BY
modified_timestamp DESC)) = 1

View File

@ -0,0 +1,72 @@
# Schema tests for silver_bridge__axelar_contractcallwithtoken.
# NOTE(review): the hex-address regexes below are unanchored (no ^...$),
# so any value merely containing a hex run passes -- confirm this is intended.
version: 2
models:
- name: silver_bridge__axelar_contractcallwithtoken
tests:
# _LOG_ID (tx_hash-event_index) is the row grain
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
# freshness guard: expect new rows within the last 3 days
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -0,0 +1,240 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['curated','reorg']
) }}
-- Hyperlane (warp route) bridge transfers: Mailbox Dispatch events joined
-- with the message id, gas payment, the SentTransferRemote event (actual
-- recipient + amount) and the matching token burn (token address).
WITH hyperlane_assets AS (
    -- Warp-route token contracts observed via transfer-remote events.
    -- NOTE(review): on incremental runs this excludes contracts already in
    -- {{ this }}, which also drops their new transfers from
    -- sent_transfer_remote below -- TODO confirm intended semantics.
    SELECT DISTINCT
        contract_address
    FROM
        {{ ref('core__fact_event_logs') }}
    WHERE
        topics [0] :: STRING IN (
            '0xd229aacb94204188fe8042965fa6b269c62dc5818b21238779ab64bdd17efeec', -- SentTransferRemote
            '0xba20947a325f450d232530e5f5fce293e7963499d5309a07cee84a269f2f15a6' -- ReceivedTransferRemote
        )
{% if is_incremental() %}
        AND contract_address NOT IN (
            SELECT DISTINCT
                contract_address
            FROM
                {{ this }}
        )
{% endif %}
),
dispatch AS (
    -- Mailbox Dispatch events (message leaving this chain).
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        event_index,
        contract_address,
        event_removed,
        -- src bridge token (router) address, not the user address
        CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS sender,
        -- destination domain id
        TRY_TO_NUMBER(utils.udf_hex_to_int(topics [2] :: STRING)) AS destination,
        -- dst bridge token (router) address, not the recipient address
        CONCAT('0x', SUBSTR(topics [3] :: STRING, 27, 40)) AS recipient,
        DATA,
        CASE
            WHEN tx_status = 'success' THEN TRUE
            ELSE FALSE
        END AS tx_succeeded,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
    WHERE
        topics [0] :: STRING = '0x769f711d20c679153d382254f59892613b58a97cc876b249134ac25c80f9c814'
        AND contract_address = LOWER('0x3a867fCfFeC2B790970eeBDC9023E75B0a172aa7')
        AND tx_succeeded
{% if is_incremental() %}
        AND modified_timestamp >= (
            SELECT
                MAX(modified_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
dispatch_id AS (
    -- DispatchId events carrying the Hyperlane message id.
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        topics [1] :: STRING AS messageId,
        CASE
            WHEN tx_status = 'success' THEN TRUE
            ELSE FALSE
        END AS tx_succeeded,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
    WHERE
        topics [0] :: STRING = '0x788dbc1b7152732178210e7f4d9d010ef016f9eafbe66786bd7169f56e0c353a'
        AND contract_address = LOWER('0x3a867fCfFeC2B790970eeBDC9023E75B0a172aa7')
        AND tx_succeeded
{% if is_incremental() %}
        AND modified_timestamp >= (
            SELECT
                MAX(modified_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
gas_payment AS (
    -- GasPayment events from the interchain gas paymaster.
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        topics [1] :: STRING AS messageId,
        TRY_TO_NUMBER(utils.udf_hex_to_int(topics [2] :: STRING)) AS destinationDomain,
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [0] :: STRING)) AS gasAmount,
        TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [1] :: STRING)) AS payment,
        CASE
            WHEN tx_status = 'success' THEN TRUE
            ELSE FALSE
        END AS tx_succeeded,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
    WHERE
        -- BUGFIX: cast topics[0] to STRING before comparing, matching the
        -- other CTEs in this model
        topics [0] :: STRING = '0x65695c3748edae85a24cc2c60b299b31f463050bc259150d2e5802ec8d11720a'
        -- BUGFIX: addresses are stored lowercase; the original mixed-case
        -- literal without LOWER() matched zero rows
        AND contract_address = LOWER('0xB3fCcD379ad66CED0c91028520C64226611A48c9')
        AND tx_succeeded
{% if is_incremental() %}
        AND modified_timestamp >= (
            SELECT
                MAX(modified_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
sent_transfer_remote AS (
    -- SentTransferRemote events on the warp-route token contracts:
    -- carries the actual recipient and bridged amount.
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        -- destination domain id (uint32 topic)
        TRY_TO_NUMBER(utils.udf_hex_to_int(topics [1] :: STRING)) AS destination,
        -- actual recipient address
        CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS recipient,
        TRY_TO_NUMBER(utils.udf_hex_to_int(DATA :: STRING)) AS amount,
        CASE
            WHEN tx_status = 'success' THEN TRUE
            ELSE FALSE
        END AS tx_succeeded,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
    WHERE
        topics [0] :: STRING = '0xd229aacb94204188fe8042965fa6b269c62dc5818b21238779ab64bdd17efeec'
        AND contract_address IN (
            SELECT
                contract_address
            FROM
                hyperlane_assets
        )
        AND tx_succeeded
{% if is_incremental() %}
        AND modified_timestamp >= (
            SELECT
                MAX(modified_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
-- this can be replaced by a single contract read of the hyperlane_assets
-- contracts (they expose a wrappedToken function)
token_transfer AS (
    -- Matches each tx with its token burn; works because each warp-route
    -- contract only handles one token.
    SELECT
        tx_hash,
        contract_address AS token_address
    FROM
        {{ ref('silver__transfers') }}
    WHERE
        tx_hash IN (
            SELECT
                tx_hash
            FROM
                sent_transfer_remote
        )
        AND to_address = '0x0000000000000000000000000000000000000000'
)
SELECT
    d.block_number,
    d.block_timestamp,
    d.origin_function_signature,
    d.origin_from_address,
    d.origin_to_address,
    d.tx_hash,
    d.event_index,
    d.contract_address,
    'Dispatch' AS event_name,
    d.event_removed,
    d.tx_succeeded,
    d.sender,
    -- BUGFIX: the original selected `receipient`/`amount` from
    -- sent_transfer_remote without joining that CTE (unresolved columns),
    -- left ambiguous unqualified columns across the USING joins, and a
    -- missing comma after `amount` silently aliased it as token_address.
    s.recipient AS destination_chain_receiver,
    d.destination AS destinationChainId,
    i.messageId,
    g.gasAmount,
    g.payment,
    s.recipient AS receiver,
    s.amount,
    t.token_address,
    d._log_id,
    d.modified_timestamp
FROM
    dispatch d
    INNER JOIN dispatch_id i
        ON i.tx_hash = d.tx_hash
    INNER JOIN gas_payment g
        ON g.tx_hash = d.tx_hash
    INNER JOIN sent_transfer_remote s
        ON s.tx_hash = d.tx_hash
    INNER JOIN token_transfer t
        ON t.tx_hash = d.tx_hash
-- dedup join fan-out within a tx, consistent with sibling bridge models
qualify ROW_NUMBER() over (
    PARTITION BY d._log_id
    ORDER BY
        d.modified_timestamp DESC
) = 1

View File

@ -0,0 +1,72 @@
# Schema tests for silver_bridge__hyperlane_bridge.
# NOTE(review): the model's final SELECT emits `contract_address` rather than
# BRIDGE_ADDRESS -- either alias the column in the model or this test fails.
# NOTE(review): the hex regexes below are unanchored (no ^...$) -- confirm.
version: 2
models:
- name: silver_bridge__hyperlane_bridge
tests:
# _LOG_ID (tx_hash-event_index) is the row grain
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
# freshness guard: expect new rows within the last 3 days
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -0,0 +1,102 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['curated','reorg']
) }}
-- Orbiter Finance bridge deposits routed through the Orbiter router (WIP).
with bridge_router as (
    -- for bridge tx utilizing router
    select
        block_number,
        block_timestamp,
        origin_from_address,
        origin_to_address,
        origin_function_signature,
        tx_hash,
        contract_address,
        event_index,
        origin_to_address as to_address,
        origin_from_address as from_address,
        origin_from_address as depositor,
        -- Orbiter encodes the destination chain id in the last 4 decimal
        -- digits of the transferred amount
        TRY_TO_NUMBER(right(utils.udf_hex_to_int(data :: STRING), 4)) as destinationChainId,
        -- BUGFIX: the original applied TRY_TO_NUMBER (default scale 0)
        -- AFTER scaling by 1e-18, truncating the ETH amount to an integer
        -- (almost always 0). Convert the raw hex amount first, then scale.
        TRY_TO_NUMBER(utils.udf_hex_to_int(data :: STRING)) * pow(10, -18) as value,
        origin_from_address as receipient,
        concat('0x', substr(topics[1]::STRING, 27, 40)) as bridge_address,
        CASE
            WHEN tx_status = 'success' THEN TRUE
            ELSE FALSE
        END AS tx_succeeded,
        CONCAT(
            tx_hash,
            '-',
            event_index
        ) AS _log_id,
        modified_timestamp
    from {{ ref('core__fact_event_logs') }}
    where
        1=1
        --and bridge_address in ('0xe4edb277e41dc89ab076a1f049f4a3efa700bce8', '0xee73323912a4e3772b74ed0ca1595a152b0ef282') -- orbiter bridge
        and topics[0]::STRING ='0x69ca02dd4edd7bf0a4abb9ed3b7af3f14778db5d61921c7dc7cd545266326de2'
        and contract_address='0x13e46b2a3f8512ed4682a8fb8b560589fe3c2172'
        AND tx_succeeded
{% if is_incremental() %}
        AND modified_timestamp >= (
            SELECT
                MAX(modified_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
-- BUGFIX: the original ended the CTE with a dangling comma followed only by
-- the commented-out native-transfer path below, which compiles to invalid
-- SQL (no final SELECT). Until the native path lands, select straight from
-- bridge_router.
{# bridge_native as (
    -- for direct native eth transfers
    select
        block_number,
        block_timestamp,
        tx_hash,
        to_address,
        from_address,
        from_address as depositor,
        right(value_precise_raw, 4) as destinationChainId,
        value,
        from_address as receipient,
        to_address as bridge_address,
        CASE
            WHEN status = 'SUCCESS' THEN TRUE
            ELSE FALSE
        END AS tx_succeeded,
        modified_timestamp
    from {{ ref('core__fact_transactions') }}
    where to_address in ('0xe4edb277e41dc89ab076a1f049f4a3efa700bce8','0x80c67432656d59144ceff962e8faf8926599bcf8')
    and tx_succeeded
{% if is_incremental() %}
    AND modified_timestamp >= (
        SELECT
            MAX(modified_timestamp) - INTERVAL '12 hours'
        FROM
            {{ this }}
    )
    AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
bridge_combine as (
    select * from bridge_native
    union all
    select * from bridge_router
) #}
select
    *,
    'orbiter finance' as platform,
    '0x4300000000000000000000000000000000000004' as token_address, -- hardcoded weth contract address
    value as token_amount -- native eth
from
    bridge_router

View File

@ -0,0 +1,72 @@
# Schema tests for silver_bridge__synapse_tokenredeem.
# NOTE(review): this file sits next to the Orbiter bridge model in the same
# commit but names a synapse model -- confirm the file/model pairing.
# NOTE(review): the hex regexes below are unanchored (no ^...$) -- confirm.
version: 2
models:
- name: silver_bridge__synapse_tokenredeem
tests:
# _LOG_ID (tx_hash-event_index) is the row grain
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
# freshness guard: expect new rows within the last 3 days
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -0,0 +1,86 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['curated','reorg']
) }}
-- Symbiosis SynthesizeRequest bridge events decoded from raw logs (WIP).
WITH base_evt AS (
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        'symbiosis' AS NAME,
        event_index,
        topics [0] :: STRING AS topic_0,
        -- split the hex payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        'SynthesizeRequest' as event_name,
        -- BUGFIX: segmented_data elements and topics are plain hex strings,
        -- not VARIANT objects -- the original ':"amount"' / ':"chainID"'
        -- path lookups always produced NULL.
        TRY_TO_NUMBER(utils.udf_hex_to_int(
            segmented_data [2] :: STRING
        )) AS amount,
        TRY_TO_NUMBER(utils.udf_hex_to_int(
            topics [2] :: STRING
        )) AS chainID,
        concat('0x', substr(topics [1] :: STRING, 27, 40)) AS from_address,
        -- request id as the raw 32-byte hex word (no 0x prefix);
        -- NOTE(review): confirm downstream expects it unprefixed
        segmented_data [0] :: STRING AS id,
        concat('0x', substr(topics [3] :: STRING, 27, 40)) AS revertableAddress,
        concat('0x', substr(segmented_data [1] :: STRING, 25, 40)) AS to_address,
        -- BUGFIX: token is ABI-encoded as a left-padded 32-byte address;
        -- extract the low 20 bytes so it matches the 0x-prefixed address
        -- format expected downstream (the raw word fails the schema regex)
        concat('0x', substr(segmented_data [3] :: STRING, 25, 40)) AS token,
        CASE
            WHEN tx_status = 'success' THEN TRUE
            ELSE FALSE
        END AS tx_succeeded,
        CONCAT(
            tx_hash,
            '-',
            event_index
        ) AS _log_id,
        modified_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
    WHERE
        topics [0] :: STRING = '0x31325fe0a1a2e6a5b1e41572156ba5b4e94f0fae7e7f63ec21e9b5ce1e4b3eab'
        AND contract_address = '0x5aa5f7f84ed0e5db0a4a85c3947ea16b53352fd4'
        AND tx_succeeded
{% if is_incremental() %}
        AND modified_timestamp >= (
            SELECT
                MAX(modified_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    tx_hash,
    event_index,
    topic_0,
    event_name,
    tx_succeeded,
    contract_address AS bridge_address,
    NAME AS platform,
    from_address AS sender,
    to_address AS receiver,
    receiver AS destination_chain_receiver,
    amount,
    chainID AS destination_chain_id,
    id,
    revertableAddress AS revertable_address,
    token AS token_address,
    _log_id,
    modified_timestamp
FROM
    base_evt

View File

@ -0,0 +1,72 @@
# Schema tests for silver_bridge__symbiosis_synthesizerequest.
# NOTE(review): the hex-address regexes below are unanchored (no ^...$),
# so any value merely containing a hex run passes -- confirm this is intended.
version: 2
models:
- name: silver_bridge__symbiosis_synthesizerequest
tests:
# _LOG_ID (tx_hash-event_index) is the row grain
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
# freshness guard: expect new rows within the last 3 days
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -12,6 +12,9 @@ models:
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
@ -66,9 +69,4 @@ models:
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3
regex: 0[xX][0-9a-fA-F]+