AN-6385/add across bridge (#365)

Add Across v3 bridge deposit events to the curated bridge models.
Authored by Sam on 2025-07-07 23:37:37 +08:00; committed by GitHub
commit 68931c47d5 (parent 1622759c33)
3 changed files with 251 additions and 2 deletions

@@ -0,0 +1,134 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
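-- Decode raw Across v3 deposit logs (FundsDeposited / V3FundsDeposited) from the
-- bridge contract into one row per event.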
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'across-v3' AS NAME,
event_index,
topic_0,
CASE
WHEN topic_0 = '0x32ed1a409ef04c7b0227189c3a103dc5ac10e775a15b785dcc510201f7c25ad3' THEN 'FundsDeposited'
WHEN topic_0 = '0xa123dc29aebf7d0c3322c8eeb5b999e859f39937950ed31056532713d0de396f' THEN 'V3FundsDeposited'
END AS event_name,
topics,
DATA,
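-- Strip the '0x' prefix from the hex data payload and split it into 64-character
-- chunks: one element per 32-byte ABI-encoded word.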
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
topic_1 :: STRING
)
) AS destinationChainId,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
topic_2 :: STRING
)
) AS depositId,
CONCAT('0x', SUBSTR(topic_3 :: STRING, 27, 40)) AS depositor,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS inputToken,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS outputToken,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS inputAmount,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS outputAmount,
TRY_TO_TIMESTAMP(
utils.udf_hex_to_int(
segmented_data [4] :: STRING
)
) AS quoteTimestamp,
TRY_TO_TIMESTAMP(
utils.udf_hex_to_int(
segmented_data [5] :: STRING
)
) AS fillDeadline,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [6] :: STRING
)
) AS exclusivityDeadline,
CONCAT('0x', SUBSTR(segmented_data [7] :: STRING, 25, 40)) AS recipient,
CONCAT('0x', SUBSTR(segmented_data [8] :: STRING, 25, 40)) AS exclusiveRelayer,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [9] :: STRING
)
) AS relayerFeePct,
segmented_data [10] :: STRING AS message,
event_removed,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topic_0 IN (
'0x32ed1a409ef04c7b0227189c3a103dc5ac10e775a15b785dcc510201f7c25ad3',
'0xa123dc29aebf7d0c3322c8eeb5b999e859f39937950ed31056532713d0de396f'
)
AND contract_address = '0x4e8e101924ede233c13e2d8622dc8aed2872d505'
AND tx_succeeded
AND block_timestamp :: DATE >= '2025-04-01'
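-- On incremental runs, only reprocess recently inserted rows: a 12-hour overlap
-- against the existing table, capped at a 7-day trailing window.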
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
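-- Rename the decoded fields to the standardized silver bridge column set.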
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
contract_address AS bridge_address,
NAME AS platform,
depositor AS sender,
recipient AS receiver,
recipient AS destination_chain_receiver,
destinationChainId AS destination_chain_id,
inputAmount AS amount,
inputToken AS token_address,
depositId AS deposit_id,
message,
quoteTimestamp AS quote_timestamp,
relayerFeePct AS relayer_fee_pct,
exclusiveRelayer AS exclusive_relayer,
exclusivityDeadline AS exclusivity_deadline,
fillDeadline AS fill_deadline,
outputAmount AS output_amount,
outputToken AS output_token,
_log_id,
_inserted_timestamp
FROM
base_evt
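
Once the model is built, the decoded output can be spot-checked directly, for example by pulling the most recent deposits and confirming that chains, addresses, and amounts decode sensibly. The fully qualified relation name below is an assumption about where dbt materializes this model:

SELECT
    block_timestamp,
    tx_hash,
    event_name,
    sender,
    receiver,
    destination_chain_id,
    token_address,
    amount
FROM silver_bridge.across_v3fundsdeposited -- assumed target schema/table for this model
ORDER BY block_timestamp DESC
LIMIT 20;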

@@ -0,0 +1,71 @@
version: 2
models:
  - name: silver_bridge__across_v3fundsdeposited
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EVENT_INDEX
        tests:
          - not_null
      - name: EVENT_NAME
        tests:
          - not_null
      - name: BRIDGE_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SENDER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: RECEIVER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: DESTINATION_CHAIN_RECEIVER
        tests:
          - not_null
      - name: AMOUNT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - DECIMAL
                - FLOAT
                - NUMBER
      - name: TOKEN_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: _INSERTED_TIMESTAMP
        tests:
          - not_null
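
For reference, the dbt_utils.unique_combination_of_columns test above compiles to roughly the following shape (a sketch, not the exact generated SQL); the test fails if any _LOG_ID value appears more than once:

WITH validation_errors AS (
    SELECT _log_id
    FROM {{ ref('silver_bridge__across_v3fundsdeposited') }}
    GROUP BY _log_id
    HAVING COUNT(*) > 1
)
SELECT * FROM validation_errors;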

@@ -8,8 +8,44 @@
tags = ['silver_bridge','defi','bridge','curated','heal']
) }}
- WITH allbridge AS (
+ WITH across_v3 AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v3' AS version,
sender,
receiver,
destination_chain_receiver,
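-- Cast to STRING, presumably to keep the column type aligned with the other protocol CTEs unioned below.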
destination_chain_id :: STRING AS destination_chain_id,
NULL AS destination_chain,
token_address,
NULL AS token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__across_v3fundsdeposited') }}
{% if is_incremental() and 'across_v3' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
allbridge AS (
SELECT
block_number,
block_timestamp,
@@ -550,6 +586,11 @@ WHERE
{% endif %}
),
all_protocols AS (
SELECT
*
FROM
across_v3
UNION ALL
SELECT
*
FROM
@@ -926,7 +967,10 @@ SELECT
amount_unadj,
amount,
amount_usd,
- IFNULL(token_is_verified, FALSE) AS token_is_verified,
+ IFNULL(
+ token_is_verified,
+ FALSE
+ ) AS token_is_verified,
_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(