AN-3841/bridge-curation (#328)

* updated contracts

* token symbol

* all bridge filter

* wormhole seed

* filter
drethereum 2024-01-11 13:00:47 -05:00 committed by GitHub
parent 50423eedd9
commit 4cb5986a1b
104 changed files with 3860 additions and 2 deletions


@ -0,0 +1,41 @@
chain,short_coin_type
Aptos,0x027d
Arbitrum,0x2329
Aurora,0x0a0a
Avalanche,0x2328
Base,0x2105
Bitcoin,0x0000
BNB Chain,0x02ca
Celo,0xce10
Coinweb,0x08ae
Conflux,0x01f7
Crypto Chain,0x018a
EOS,0x00c2
Ethereum,0x003c
Evmos,0x11bc
Fantom,0x03ef
Flow,0x021b
Gnosis,0x02bc
Harmony,0x03ff
Linea,0xe708
Manta,0x0263
Mantle,0x1388
Metis,0x0440
Moonbeam,0x0504
Moonriver,0x0505
Nautilus,0x56ce
Near,0x018d
opBNB,0x00cc
Optimism,0x0266
Polygon POS,0x03c6
Polygon zkEVM,0x044d
Scroll,0x2750
SKALE Europa,0x9296
SKALE Nebula,0xb4b1
Solana,0x01f5
Sui,0x0310
Terra,0x014a
Thorchain,0x03a3
Tron,0x00c3
zkSync Era,0x0324
zkFair,0xa70e

@ -0,0 +1,98 @@
destination_chain,standard_destination_chain
acala,acala
algorand,algorand
aptos,aptos
arbitrum,arbitrum
arbitrum nova,arbitrum nova
arbitrum one,arbitrum
archway,archway
astar,astar
aurora,aurora
aurora mainnet,aurora
avalanche,avalanche
avalanche c-chain,avalanche
base,base
bnb,bsc
bnb chain,bsc
bnb smart chain mainnet,bsc
boba bnb mainnet,boba
boba network,boba
bsc,bsc
canto,canto
carbon,carbon
celo,celo
celo mainnet,celo
coinweb,coinweb
conflux,conflux
conflux espace,conflux
crab network,crab
crescent,crescent
cronos mainnet,cronos
crypto chain,crypto
dfk chain,dfk
dogechain mainnet,dogechain
eos,eos
ethereum,ethereum
ethereum mainnet,ethereum
evmos,evmos
fantom,fantom
fantom opera,fantom
filecoin,filecoin
fuse,fuse
gnosis,gnosis
harmony mainnet shard 0,harmony
huobi eco chain mainnet,huobi eco
injective,injective
juno,juno
karura,karura
kava,kava
klaytn,klaytn
klaytn mainnet cypress,klaytn
kujira,kujira
linea,linea
manta,manta
mantle,mantle
metis,metis
metis andromeda mainnet,metis
moonbeam,moonbeam
moonriver,moonriver
nautilus,nautilus
near,near
neutron,neutron
oasis,oasis
okxchain mainnet,okxchain
ontology mainnet,ontology
op mainnet,optimism
opbnb,opbnb
optimism,optimism
osmosis,osmosis
polygon,polygon
polygon mainnet,polygon
polygon pos,polygon
polygon zkevm,polygon zkevm
ronin,ronin
scroll,scroll
secret-snip,secret
sei,sei
skale europa,skale europa
skale nebula,skale nebula
solana,solana
stargaze,stargaze
starknet,starknet
sui,sui
telos evm mainnet,telos
terra,terra
terra-2,terra2
terra2,terra2
tezos,tezos
tron,tron
umee,umee
waves,waves
xpla,xpla
xrpl,xrpl
zkfair,zkfair
zksync era,zksync era
zksync era mainnet,zksync era
zksync lite,zksync lite
zora,zora
zzz,zzz

@ -0,0 +1,13 @@
chain_name,chain_id
Arbitrum,110
Avalanche,106
Base,184
BNB,102
Ethereum,101
Fantom,112
Kava,177
Linea,183
Mantle,181
Metis,151
Optimism,111
Polygon,109

@ -0,0 +1,37 @@
chain_name,wormhole_chain_id
acala,12
algorand,8
aptos,22
arbitrum,23
aurora,9
avalanche,6
avalanche,43114
base,30
bsc,4
celo,14
cosmoshub,4000
ethereum,2
evmos,4001
fantom,10
gnosis,25
injective,19
karura,11
klaytn,13
kujira,4002
moonbeam,16
near,15
neon,17
oasis,7
optimism,24
osmosis,20
polygon,5
polygon,137
pythnet,26
rootstock,33
sei,32
sepolia,10002
solana,1
sui,21
terra,3
terra2,18
xpla,28

@ -0,0 +1,83 @@
{% docs evm_bridge_table_doc %}
A convenience table that aggregates bridge activity from event_logs, traces, and transfers, including bridge deposits and transfers sent through the following protocols: ACROSS, ALLBRIDGE, AXELAR, CELER, CBRIDGE, HOP, MESON, MULTICHAIN, STARGATE, SYMBIOSIS, SYNAPSE, and WORMHOLE, along with other helpful columns, including an amount in USD where available. Note that this table only includes records for the protocols listed above with live, on-chain bridge activity and may not represent the complete bridging picture.
{% enddocs %}
{% docs evm_bridge_platform %}
The platform or protocol from which the bridge transaction or event originates.
{% enddocs %}
{% docs evm_bridge_origin_from %}
The address from which the transaction originated. This may be an EOA or a contract address; in most cases, however, this is the user that initiated the bridge deposit or transfer.
{% enddocs %}
{% docs evm_bridge_sender %}
The address that initiated the bridge deposit or transfer. This address is the sender of the tokens/assets being bridged to the destination chain. This may be an EOA or contract address.
{% enddocs %}
{% docs evm_bridge_receiver %}
The designated address set to receive the deposit or transfer. This may be an EOA or contract address.
{% enddocs %}
{% docs evm_bridge_destination_chain_receiver %}
The designated address set to receive the bridged tokens on the target chain after the completion of the bridge transaction. For non-EVM chains, the hex address is decoded or encoded to match the address format of the destination chain, where possible. This may be an EOA or contract address.
{% enddocs %}
{% docs evm_bridge_destination_chain %}
The name of the blockchain network to which the assets are being bridged. This may be any EVM-compatible chain or another blockchain network that the bridging protocol supports.
{% enddocs %}
{% docs evm_bridge_destination_chain_id %}
The numeric identifier associated with the destination blockchain network. This is specific to the chain and helps in uniquely identifying it.
{% enddocs %}
{% docs evm_bridge_address %}
The address of the contract responsible for handling the bridge deposit or transfer. This contract mediates the transfer and ensures that assets are sent and received appropriately.
{% enddocs %}
{% docs evm_bridge_token_address %}
The address associated with the token that is being bridged. It provides a unique identifier for the token within its origin blockchain.
{% enddocs %}
{% docs evm_bridge_token_symbol %}
The symbol representing the token being bridged. This provides a shorthand representation of the token.
{% enddocs %}
{% docs evm_bridge_amount_unadj %}
The raw, non-decimal adjusted amount of tokens involved in the bridge transaction.
{% enddocs %}
{% docs evm_bridge_amount %}
The decimal adjusted amount of tokens involved in the bridge transaction, where available.
{% enddocs %}
{% docs evm_bridge_amount_usd %}
The value of the bridged tokens in USD at the time of the bridge transaction, where available.
{% enddocs %}
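As a hedged usage sketch (not part of the dbt docs above), assuming the gold model is published as polygon.defi.ez_bridge_activity as referenced in the README change below, the documented columns can be combined like this:

SELECT
    platform,
    destination_chain,
    token_symbol,
    COUNT(DISTINCT tx_hash) AS bridge_txns,
    SUM(amount_usd) AS total_amount_usd -- SUM skips rows where amount_usd is NULL
FROM polygon.defi.ez_bridge_activity
WHERE block_timestamp >= DATEADD('day', -30, CURRENT_TIMESTAMP())
GROUP BY 1, 2, 3
ORDER BY total_amount_usd DESC NULLS LAST
LIMIT 20;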


@ -45,6 +45,7 @@ There is more information on how to use dbt docs in the last section of this doc
### DeFi Tables (polygon.defi)
- [dim_dex_liquidity_pools](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__dim_dex_liquidity_pools)
- [ez_dex_swaps](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_dex_swaps)
- [ez_bridge_activity](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.defi__ez_bridge_activity)
### NFT Tables (polygon.nft)
- [ez_nft_mints](https://flipsidecrypto.github.io/polygon-models/#!/model/model.polygon_models.nft__ez_nft_mints)


@ -0,0 +1,55 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta ={
'database_tags':{
'table':{
'PROTOCOL': 'ACROSS, ALLBRIDGE, AXELAR, CELER, CBRIDGE, HOP, MESON, MULTICHAIN, STARGATE, SYMBIOSIS, SYNAPSE, WORMHOLE',
'PURPOSE': 'BRIDGE'
} } }
) }}
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
sender,
receiver,
destination_chain_receiver,
COALESCE(
standard_destination_chain,
b.destination_chain
) AS destination_chain,
destination_chain_id,
token_address,
token_symbol,
amount_unadj,
amount,
amount_usd,
COALESCE (
complete_bridge_activity_id,
{{ dbt_utils.generate_surrogate_key(
['_id']
) }}
) AS ez_bridge_activity_id,
COALESCE(
inserted_timestamp,
'2000-01-01'
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
'2000-01-01'
) AS modified_timestamp
FROM
{{ ref('silver_bridge__complete_bridge_activity') }}
b
LEFT JOIN {{ ref('silver_bridge__standard_dst_chain_seed') }} C
ON b.destination_chain = C.destination_chain


@ -0,0 +1,54 @@
version: 2
models:
- name: defi__ez_bridge_activity
description: '{{ doc("evm_bridge_table_doc") }}'
columns:
- name: BLOCK_NUMBER
description: '{{ doc("poly_block_number") }}'
- name: BLOCK_TIMESTAMP
description: '{{ doc("poly_block_timestamp") }}'
- name: TX_HASH
description: '{{ doc("poly_logs_tx_hash") }}'
- name: CONTRACT_ADDRESS
description: '{{ doc("poly_logs_contract_address") }}'
- name: EVENT_NAME
description: '{{ doc("poly_event_name") }}'
- name: EVENT_INDEX
description: '{{ doc("poly_event_index") }}'
- name: ORIGIN_FUNCTION_SIGNATURE
description: '{{ doc("nft_origin_sig") }}'
- name: ORIGIN_FROM_ADDRESS
description: '{{ doc("evm_bridge_origin_from") }}'
- name: ORIGIN_TO_ADDRESS
description: '{{ doc("poly_origin_from") }}'
- name: PLATFORM
description: '{{ doc("evm_bridge_platform") }}'
- name: SENDER
description: '{{ doc("evm_bridge_sender") }}'
- name: RECEIVER
description: '{{ doc("evm_bridge_receiver") }}'
- name: DESTINATION_CHAIN_RECEIVER
description: '{{ doc("evm_bridge_destination_chain_receiver") }}'
- name: DESTINATION_CHAIN
description: '{{ doc("evm_bridge_destination_chain") }}'
- name: DESTINATION_CHAIN_ID
description: '{{ doc("evm_bridge_destination_chain_id") }}'
- name: BRIDGE_ADDRESS
description: '{{ doc("evm_bridge_address") }}'
- name: TOKEN_ADDRESS
description: '{{ doc("evm_bridge_token_address") }}'
- name: TOKEN_SYMBOL
description: '{{ doc("evm_bridge_token_symbol") }}'
- name: AMOUNT_UNADJ
description: '{{ doc("evm_bridge_amount_unadj") }}'
- name: AMOUNT
description: '{{ doc("evm_bridge_amount") }}'
- name: AMOUNT_USD
description: '{{ doc("evm_bridge_amount_usd") }}'
- name: EZ_BRIDGE_ACTIVITY_ID
description: '{{ doc("pk") }}'
- name: INSERTED_TIMESTAMP
description: '{{ doc("inserted_timestamp") }}'
- name: MODIFIED_TIMESTAMP
description: '{{ doc("modified_timestamp") }}'


@ -0,0 +1,93 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'across' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_flat :"depositId" :: STRING
) AS depositId,
decoded_flat :"depositor" :: STRING AS depositor,
TRY_TO_NUMBER(
decoded_flat :"destinationChainId" :: STRING
) AS destinationChainId,
decoded_flat :"message" :: STRING AS message,
TRY_TO_NUMBER(
decoded_flat :"originChainId" :: STRING
) AS originChainId,
decoded_flat :"originToken" :: STRING AS originToken,
TRY_TO_TIMESTAMP(
decoded_flat :"quoteTimestamp" :: STRING
) AS quoteTimestamp,
decoded_flat :"recipient" :: STRING AS recipient,
TRY_TO_NUMBER(
decoded_flat :"relayerFeePct" :: STRING
) AS relayerFeePct,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING = '0xafc4df6845a4ab948b492800d3d8a25d538a102a2bc07cd01f1cfa097fddcff6'
AND contract_address = '0x9295ee1d8c5b022be115a2ad3c30c72e34e7f096'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
name AS platform,
depositor AS sender,
recipient AS receiver,
recipient AS destination_chain_receiver,
destinationChainId AS destination_chain_id,
amount,
depositId AS deposit_id,
message,
originChainId AS origin_chain_id,
originToken AS token_address,
quoteTimestamp AS quote_timestamp,
relayerFeePct AS relayer_fee_pct,
_log_id,
_inserted_timestamp
FROM
base_evt


@ -0,0 +1,74 @@
version: 2
models:
- name: silver_bridge__across_fundsdeposited
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,146 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'allbridge' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
utils.udf_hex_to_string(
SUBSTRING(
decoded_flat :"destination" :: STRING,
3
)
) AS destination_chain_symbol,
decoded_flat :"lockId" :: STRING AS lockId,
decoded_flat :"recipient" :: STRING AS recipient,
decoded_flat :"sender" :: STRING AS sender,
utils.udf_hex_to_string(
SUBSTRING(
decoded_flat :"tokenSource" :: STRING,
3
)
) AS token_source,
REGEXP_REPLACE(
decoded_flat :"tokenSourceAddress" :: STRING,
'0+$',
''
) AS tokenSourceAddress,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING = '0x884a8def17f0d5bbb3fef53f3136b5320c9b39f75afb8985eeab9ea1153ee56d'
AND contract_address = '0xbbbd1bbb4f9b936c3604906d7592a644071de884'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
sender,
recipient AS receiver,
amount,
lockId AS lock_id,
CASE
WHEN destination_chain_symbol = 'AURO' THEN 'aurora mainnet'
WHEN destination_chain_symbol = 'AVA' THEN 'avalanche c-chain'
WHEN destination_chain_symbol = 'BSC' THEN 'bnb smart chain mainnet'
WHEN destination_chain_symbol = 'CELO' THEN 'celo mainnet'
WHEN destination_chain_symbol = 'ETH' THEN 'ethereum mainnet'
WHEN destination_chain_symbol = 'FTM' THEN 'fantom opera'
WHEN destination_chain_symbol = 'HECO' THEN 'huobi eco chain mainnet'
WHEN destination_chain_symbol = 'KLAY' THEN 'klaytn mainnet cypress'
WHEN destination_chain_symbol = 'POL' THEN 'polygon mainnet'
WHEN destination_chain_symbol = 'SOL' THEN 'solana'
WHEN destination_chain_symbol = 'TRA' THEN 'terra'
WHEN destination_chain_symbol = 'TEZ' THEN 'tezos'
WHEN destination_chain_symbol = 'WAVE' THEN 'waves'
ELSE LOWER(destination_chain_symbol)
END AS destination_chain,
CASE
WHEN token_source = 'AURO' THEN 'aurora mainnet'
WHEN token_source = 'AVA' THEN 'avalanche c-chain'
WHEN token_source = 'BSC' THEN 'bnb smart chain mainnet'
WHEN token_source = 'CELO' THEN 'celo mainnet'
WHEN token_source = 'ETH' THEN 'ethereum mainnet'
WHEN token_source = 'FTM' THEN 'fantom opera'
WHEN token_source = 'HECO' THEN 'huobi eco chain mainnet'
WHEN token_source = 'KLAY' THEN 'klaytn mainnet cypress'
WHEN token_source = 'POL' THEN 'polygon mainnet'
WHEN token_source = 'SOL' THEN 'solana'
WHEN token_source = 'TRA' THEN 'terra'
WHEN token_source = 'TEZ' THEN 'tezos'
WHEN token_source = 'WAVE' THEN 'waves'
ELSE LOWER(token_source)
END AS source_chain,
CASE
WHEN destination_chain = 'solana' THEN utils.udf_hex_to_base58(recipient)
WHEN destination_chain = 'waves' THEN utils.udf_hex_to_base58(SUBSTR(recipient,1,54))
WHEN destination_chain ILIKE 'terra%' THEN utils.udf_hex_to_bech32(recipient, SUBSTR(destination_chain, 1, 5))
WHEN destination_chain = 'tezos' THEN utils.udf_hex_to_tezos(CONCAT('0x', SUBSTR(recipient, 7, 40)), 'tz1')
WHEN destination_chain = 'near' THEN utils.udf_hex_to_string(SUBSTR(recipient,3))
WHEN destination_chain IN (
'aurora mainnet',
'avalanche c-chain',
'bnb smart chain mainnet',
'celo mainnet',
'fantom opera',
'fuse',
'huobi eco chain mainnet',
'klaytn mainnet cypress',
'polygon mainnet'
) THEN SUBSTR(
recipient,
1,
42
)
WHEN destination_chain = 'zzz' THEN origin_from_address
ELSE recipient
END AS destination_chain_receiver,
tokenSourceAddress AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt
WHERE
source_chain = 'polygon mainnet'
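As a hedged aside (not part of the model above): the destination and tokenSource fields are ASCII hex, so the udf_hex_to_string calls recover short chain symbols such as 'SOL', which the CASE expressions then normalize. A minimal sketch, assuming the UDF performs plain ASCII hex decoding:

SELECT
    utils.udf_hex_to_string('534f4c') AS decoded_symbol; -- 0x53 = 'S', 0x4f = 'O', 0x4c = 'L', i.e. 'SOL', mapped to 'solana' above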


@ -0,0 +1,74 @@
version: 2
models:
- name: silver_bridge__allbridge_sent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,210 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'axelar' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
decoded_flat :"destinationChain" :: STRING AS destinationChain,
LOWER(
decoded_flat :"destinationContractAddress" :: STRING
) AS destinationContractAddress,
decoded_flat :"payload" :: STRING AS payload,
origin_from_address AS recipient,
decoded_flat :"payloadHash" :: STRING AS payloadHash,
decoded_flat :"sender" :: STRING AS sender,
decoded_flat :"symbol" :: STRING AS symbol,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING = '0x7e50569d26be643bda7757722291ec66b1be66d8283474ae3fab5a98f878a7a2'
AND contract_address = '0x6f015f16de9fc8791b234ef68d486d2bf203fba8'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
native_gas_paid AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'axelar' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
decoded_flat :"destinationChain" :: STRING AS destinationChain,
LOWER(
decoded_flat :"destinationAddress" :: STRING
) AS destinationAddress,
TRY_TO_NUMBER(
decoded_flat :"gasFeeAmount" :: STRING
) AS gasFeeAmount,
decoded_flat :"payloadHash" :: STRING AS payloadHash,
decoded_flat :"refundAddress" :: STRING AS refundAddress,
decoded_flat :"sourceAddress" :: STRING AS sourceAddress,
decoded_flat :"symbol" :: STRING AS symbol,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING = '0x999d431b58761213cf53af96262b67a069cbd963499fd8effd1e21556217b841'
AND contract_address = '0x2d5d7d31f671f86c782533cc367f14109a082712'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
transfers AS (
SELECT
block_number,
tx_hash,
event_index,
contract_address AS token_address,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__transfers') }}
WHERE
from_address = '0xce16f69375520ab01377ce7b88f5ba8c48f8d666'
AND to_address = '0x6f015f16de9fc8791b234ef68d486d2bf203fba8'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
FINAL AS (
SELECT
b.block_number,
b.block_timestamp,
b.origin_function_signature,
b.origin_from_address,
b.origin_to_address,
b.tx_hash,
b.event_index,
b.topic_0,
b.event_name,
b.event_removed,
b.tx_status,
b.contract_address AS bridge_address,
b.name AS platform,
b.sender,
CASE
WHEN b.recipient = '0x0000000000000000000000000000000000000000' THEN refundAddress
ELSE b.recipient
END AS receiver,
CASE
WHEN LOWER(
b.destinationChain
) = 'avalanche' THEN 'avalanche c-chain'
WHEN LOWER(
b.destinationChain
) = 'binance' THEN 'bnb smart chain mainnet'
WHEN LOWER(
b.destinationChain
) = 'celo' THEN 'celo mainnet'
WHEN LOWER(
b.destinationChain
) = 'ethereum' THEN 'ethereum mainnet'
WHEN LOWER(
b.destinationChain
) = 'fantom' THEN 'fantom opera'
WHEN LOWER(
b.destinationChain
) = 'polygon' THEN 'polygon mainnet'
ELSE LOWER(
b.destinationChain
)
END AS destination_chain,
b.destinationContractAddress AS destination_contract_address,
CASE
WHEN destination_chain IN (
'arbitrum',
'avalanche c-chain',
'base',
'bnb smart chain mainnet',
'celo mainnet',
'ethereum mainnet',
'fantom opera',
'filecoin',
'kava',
'linea',
'mantle',
'moonbeam',
'optimism',
'polygon mainnet',
'scroll'
) THEN receiver
ELSE destination_contract_address
END AS destination_chain_receiver,
b.amount,
b.payload,
b.payloadHash AS payload_hash,
b.symbol AS token_symbol,
t.token_address,
b._log_id,
b._inserted_timestamp
FROM
base_evt b
INNER JOIN transfers t
ON b.block_number = t.block_number
AND b.tx_hash = t.tx_hash
LEFT JOIN native_gas_paid n
ON n.block_number = b.block_number
AND n.tx_hash = b.tx_hash
)
SELECT
*
FROM
FINAL qualify (ROW_NUMBER() over (PARTITION BY _log_id
ORDER BY
_inserted_timestamp DESC)) = 1


@ -0,0 +1,74 @@
version: 2
models:
- name: silver_bridge__axelar_contractcallwithtoken
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,91 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'celer_cbridge' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_flat :"dstChainId" :: STRING
) AS dstChainId,
TRY_TO_NUMBER(
decoded_flat :"maxSlippage" :: STRING
) AS maxSlippage,
TRY_TO_NUMBER(
decoded_flat :"nonce" :: STRING
) AS nonce,
decoded_flat :"receiver" :: STRING AS receiver,
decoded_flat :"sender" :: STRING AS sender,
decoded_flat :"token" :: STRING AS token,
decoded_flat :"transferId" :: STRING AS transferId,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING = '0x89d8051e597ab4178a863a5190407b98abfeff406aa8db90c59af76612e58f01'
AND contract_address IN (
'0x88dcdc47d2f83a99cf0000fdf667a468bb958a78',
'0xa251c4691c1ffd7d9b128874c023427513d8ac5c',
'0xb5df797468e6e8f2cb293cd6e32939366e0f8733',
'0x02745032d2aeccdc90310d6cca32cb82c7e149dd',
'0xf5c6825015280cdfd0b56903f9f8b5a2233476f5'
)
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
sender,
receiver,
receiver AS destination_chain_receiver,
amount,
dstChainId AS destination_chain_id,
maxSlippage AS max_slippage,
nonce,
token AS token_address,
transferId AS transfer_id,
_log_id,
_inserted_timestamp
FROM
base_evt


@ -0,0 +1,74 @@
version: 2
models:
- name: silver_bridge__celer_cbridge_send
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,119 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
tags = ['curated']
) }}
WITH base_contracts AS (
SELECT
contract_address,
MAX(block_number) AS block_number
FROM
{{ ref('silver__logs') }}
WHERE
topics [0] :: STRING = '0xe35dddd4ea75d7e9b3fe93af4f4e40e778c3da4074c9d93e7c6536f1e803c1eb'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND contract_address NOT IN (
SELECT
DISTINCT contract_address
FROM
{{ this }}
)
{% endif %}
GROUP BY
1
),
function_sigs AS (
SELECT
'0xe9cdfe51' AS function_sig,
'ammWrapper' AS function_name
),
inputs AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
0 AS function_input,
CONCAT(
function_sig,
LPAD(
function_input,
64,
0
)
) AS DATA
FROM
base_contracts
JOIN function_sigs
ON 1 = 1
),
contract_reads AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
function_input,
DATA,
utils.udf_json_rpc_call(
'eth_call',
[{ 'to': contract_address, 'from': null, 'data': data }, utils.udf_int_to_hex(block_number) ]
) AS rpc_request,
live.udf_api(
node_url,
rpc_request
) AS read_output,
SYSDATE() AS _inserted_timestamp
FROM
inputs
JOIN {{ source(
'streamline_crosschain',
'node_mapping'
) }}
ON 1 = 1
AND chain = 'polygon'
),
reads_flat AS (
SELECT
read_output,
read_output :data :id :: STRING AS read_id,
read_output :data :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
function_sig,
function_name,
function_input,
DATA,
contract_address,
block_number,
_inserted_timestamp
FROM
contract_reads
)
SELECT
read_output,
read_id,
read_result,
read_id_object,
function_sig,
function_name,
function_input,
DATA,
block_number,
contract_address,
CONCAT('0x', SUBSTR(read_result, 27, 40)) AS amm_wrapper_address,
_inserted_timestamp
FROM
reads_flat
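A hedged illustration (not part of the model) of the calldata string the inputs CTE assembles before it is submitted via utils.udf_json_rpc_call and live.udf_api; the selector and padding mirror the function_sigs and LPAD logic above:

SELECT
    CONCAT(
        '0xe9cdfe51', -- ammWrapper() selector from function_sigs
        LPAD(0, 64, 0) -- zero-padded 32-byte input word, as in the inputs CTE
    ) AS eth_call_data;
-- result: '0xe9cdfe51' followed by 64 zero characters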


@ -0,0 +1,16 @@
version: 2
models:
- name: silver_bridge__hop_ammwrapper
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- CONTRACT_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: AMM_WRAPPER_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -0,0 +1,119 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
tags = ['curated']
) }}
WITH base_contracts AS (
SELECT
contract_address,
amm_wrapper_address,
block_number
FROM
{{ ref('silver_bridge__hop_ammwrapper') }}
{% if is_incremental() %}
WHERE
amm_wrapper_address NOT IN (
SELECT
DISTINCT amm_wrapper_address
FROM
{{ this }}
)
{% endif %}
),
function_sigs AS (
SELECT
'0x1ee1bf67' AS function_sig,
'l2CanonicalToken' AS function_name
),
inputs AS (
SELECT
amm_wrapper_address,
block_number,
function_sig,
function_name,
0 AS function_input,
CONCAT(
function_sig,
LPAD(
function_input,
64,
0
)
) AS DATA
FROM
base_contracts
JOIN function_sigs
ON 1 = 1
),
contract_reads AS (
SELECT
amm_wrapper_address,
block_number,
function_sig,
function_name,
function_input,
DATA,
utils.udf_json_rpc_call(
'eth_call',
[{ 'to': amm_wrapper_address, 'from': null, 'data': data }, utils.udf_int_to_hex(block_number) ]
) AS rpc_request,
live.udf_api(
node_url,
rpc_request
) AS read_output,
SYSDATE() AS _inserted_timestamp
FROM
inputs
JOIN {{ source(
'streamline_crosschain',
'node_mapping'
) }}
ON 1 = 1
AND chain = 'polygon'
),
reads_flat AS (
SELECT
read_output,
read_output :data :id :: STRING AS read_id,
read_output :data :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
function_sig,
function_name,
function_input,
DATA,
amm_wrapper_address,
block_number,
_inserted_timestamp
FROM
contract_reads
)
SELECT
read_output,
read_id,
read_result,
read_id_object,
function_sig,
function_name,
function_input,
DATA,
block_number,
contract_address,
amm_wrapper_address,
CASE
WHEN contract_address = '0x58c61aee5ed3d748a1467085ed2650b697a66234' THEN '0xc5102fe9359fd9a28f877a67e36b0f050d81a3cc'
ELSE CONCAT('0x', SUBSTR(read_result, 27, 40))
END AS token_address,
_inserted_timestamp
FROM
reads_flat
LEFT JOIN base_contracts USING(amm_wrapper_address)
WHERE
token_address <> '0x'
AND token_address IS NOT NULL
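A hedged sketch (with a hypothetical return value) of how read_result is decoded into a token address above: an eth_call returning a single address yields a 32-byte hex word, so the last 40 hex characters, i.e. SUBSTR(read_result, 27, 40), are the 20-byte address:

WITH example AS (
    SELECT
        CONCAT('0x', REPEAT('0', 24), 'c5102fe9359fd9a28f877a67e36b0f050d81a3cc') AS read_result -- hypothetical eth_call return word
)
SELECT
    CONCAT('0x', SUBSTR(read_result, 27, 40)) AS token_address -- '0xc5102fe9359fd9a28f877a67e36b0f050d81a3cc'
FROM example;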


@ -0,0 +1,16 @@
version: 2
models:
- name: silver_bridge__hop_l2canonicaltoken
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- CONTRACT_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -0,0 +1,107 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'hop' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_flat :"amountOutMin" :: STRING
) AS amountOutMin,
TRY_TO_NUMBER(
decoded_flat :"bonderFee" :: STRING
) AS bonderFee,
TRY_TO_NUMBER(
decoded_flat :"chainId" :: STRING
) AS chainId,
TRY_TO_TIMESTAMP(
decoded_flat :"deadline" :: STRING
) AS deadline,
TRY_TO_TIMESTAMP(
decoded_flat :"index" :: STRING
) AS INDEX,
decoded_flat :"recipient" :: STRING AS recipient,
decoded_flat :"transferId" :: STRING AS transferId,
decoded_flat :"transferNonce" :: STRING AS transferNonce,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING = '0xe35dddd4ea75d7e9b3fe93af4f4e40e778c3da4074c9d93e7c6536f1e803c1eb'
AND origin_to_address IS NOT NULL
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
hop_tokens AS (
SELECT
block_number,
contract_address,
amm_wrapper_address,
token_address,
_inserted_timestamp
FROM
{{ ref('silver_bridge__hop_l2canonicaltoken') }}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
amm_wrapper_address,
NAME AS platform,
origin_from_address AS sender,
recipient AS receiver,
receiver AS destination_chain_receiver,
chainId AS destination_chain_id,
token_address,
amount,
amountOutMin AS amount_out_min,
bonderFee AS bonder_fee,
deadline,
INDEX,
transferId AS transfer_id,
transferNonce AS transfer_nonce,
_log_id,
_inserted_timestamp
FROM
base_evt b
LEFT JOIN hop_tokens h USING(contract_address)
WHERE
token_address IS NOT NULL


@ -0,0 +1,74 @@
version: 2
models:
- name: silver_bridge__hop_transfersent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,176 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH token_transfers AS (
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
contract_address,
from_address,
to_address,
raw_amount,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__transfers') }}
WHERE
from_address <> '0x0000000000000000000000000000000000000000'
AND to_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
native_transfers AS (
SELECT
et.block_number,
et.block_timestamp,
et.tx_hash,
tx.from_address AS origin_from_address,
tx.to_address AS origin_to_address,
tx.origin_function_signature,
et.from_address,
et.to_address,
amount_precise_raw,
identifier,
_call_id,
et._inserted_timestamp
FROM
{{ ref('silver__native_transfers') }}
et
INNER JOIN {{ ref('silver__transactions') }}
tx
ON et.block_number = tx.block_number
AND et.tx_hash = tx.tx_hash
WHERE
et.to_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
{% if is_incremental() %}
AND et._inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
all_transfers AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
'Transfer' AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
raw_amount AS amount_unadj,
contract_address AS token_address,
{{ dbt_utils.generate_surrogate_key(
['_log_id']
) }} AS _id,
_inserted_timestamp
FROM
token_transfers
UNION ALL
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
NULL AS event_index,
NULL AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
amount_precise_raw AS amount_unadj,
'0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270' AS token_address,
{{ dbt_utils.generate_surrogate_key(
['_call_id']
) }} AS _id,
_inserted_timestamp
FROM
native_transfers
),
dst_info AS (
SELECT
block_number,
tx_hash,
topics [1] :: STRING AS encoded_data,
SUBSTR(RIGHT(encoded_data, 12), 1, 4) AS destination_chain_id,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__logs') }}
WHERE
contract_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
AND topics [0] :: STRING = '0x5ce4019f772fda6cb703b26bce3ec3006eb36b73f1d3a0eb441213317d9f5e9d'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '16 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
t.block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
t.tx_hash,
event_index,
event_name,
'meson' AS platform,
bridge_address,
sender,
receiver,
CASE
WHEN origin_from_address = '0x0000000000000000000000000000000000000000' THEN sender
ELSE origin_from_address
END AS destination_chain_receiver,
amount_unadj,
destination_chain_id,
COALESCE(LOWER(chain),'other') AS destination_chain,
token_address,
_id,
t._inserted_timestamp
FROM
all_transfers t
INNER JOIN dst_info d
ON t.tx_hash = d.tx_hash
AND t.block_number = d.block_number
LEFT JOIN {{ ref('silver_bridge__meson_chain_id_seed') }}
s
ON d.destination_chain_id :: STRING = RIGHT(
s.short_coin_type,
4
) :: STRING
WHERE
origin_to_address IS NOT NULL qualify (ROW_NUMBER() over (PARTITION BY _id
ORDER BY
t._inserted_timestamp DESC)) = 1
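A hedged sketch (with a made-up topics [1] value) of how the dst_info CTE slices out the destination chain id and how it lines up with the meson_chain_id_seed file above:

WITH example AS (
    SELECT
        CONCAT('0x', REPEAT('0', 52), '232900000000') AS encoded_data -- hypothetical topics [1] value
)
SELECT
    SUBSTR(RIGHT(encoded_data, 12), 1, 4) AS destination_chain_id -- '2329', matching RIGHT('0x2329', 4) for Arbitrum in the seed
FROM example;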


@ -0,0 +1,52 @@
version: 2
models:
- name: silver_bridge__meson_transfers
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_ADDRESS
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,78 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'multichain' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
decoded_flat :"from" :: STRING AS from_address,
decoded_flat :"receiver" :: STRING AS receiver,
decoded_flat :"swapoutID" :: STRING AS swapoutID,
TRY_TO_NUMBER(
decoded_flat :"toChainID" :: STRING
) AS toChainID,
decoded_flat :"token" :: STRING AS token,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING = '0x0d969ae475ff6fcaf0dcfa760d4d8607244e8d95e9bf426f8d5d69f9a3e525af'
AND contract_address = '0x1633d66ca91ce4d81f63ea047b7b19beb92df7f3'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
LOWER(from_address) AS sender,
LOWER(receiver) AS receiver,
LOWER(receiver) AS destination_chain_receiver,
amount,
toChainID AS destination_chain_id,
token AS token_address,
swapoutID AS swapout_id,
_log_id,
_inserted_timestamp
FROM
base_evt
WHERE destination_chain_id <> 0


@ -0,0 +1,69 @@
version: 2
models:
- name: silver_bridge__multichain_v7_loganyswapout
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -0,0 +1,624 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = ['block_number','platform','version'],
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH across AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
NULL AS destination_chain,
token_address,
NULL AS token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__across_fundsdeposited') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
allbridge AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
NULL AS destination_chain_id,
destination_chain,
token_address,
NULL AS token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__allbridge_sent') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
axelar AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
NULL AS destination_chain_id,
destination_chain,
token_address,
token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__axelar_contractcallwithtoken') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
celer_cbridge AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
NULL AS destination_chain,
token_address,
NULL AS token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__celer_cbridge_send') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
hop AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
NULL AS destination_chain,
token_address,
NULL AS token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__hop_transfersent') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
meson AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
destination_chain,
token_address,
NULL AS token_symbol,
amount_unadj,
_id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__meson_transfers') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
multichain AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
NULL AS destination_chain,
token_address,
NULL AS token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__multichain_v7_loganyswapout') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
stargate AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
destination_chain,
token_address,
NULL AS token_symbol,
amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__stargate_swap') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
symbiosis AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
NULL AS destination_chain,
token_address,
NULL AS token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__symbiosis_synthesizerequest') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
synapse_tb AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1-tb' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
NULL AS destination_chain,
token_address,
NULL AS token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__synapse_token_bridge') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
synapse_tbs AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1-tbs' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
NULL AS destination_chain,
token_address,
NULL AS token_symbol,
amount AS amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__synapse_tokenbridgeandswap') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
wormhole AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
destination_chain,
token_address,
NULL AS token_symbol,
amount_unadj,
_id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__wormhole_transfers') }}
{% if is_incremental() %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '36 hours'
FROM
{{ this }}
)
{% endif %}
),
all_protocols AS (
SELECT
*
FROM
across
UNION ALL
SELECT
*
FROM
allbridge
UNION ALL
SELECT
*
FROM
axelar
UNION ALL
SELECT
*
FROM
celer_cbridge
UNION ALL
SELECT
*
FROM
hop
UNION ALL
SELECT
*
FROM
meson
UNION ALL
SELECT
*
FROM
multichain
UNION ALL
SELECT
*
FROM
stargate
UNION ALL
SELECT
*
FROM
symbiosis
UNION ALL
SELECT
*
FROM
synapse_tb
UNION ALL
SELECT
*
FROM
synapse_tbs
UNION ALL
SELECT
*
FROM
wormhole
),
FINAL AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
CASE
WHEN platform IN (
'stargate',
'wormhole',
'meson'
) THEN destination_chain_id :: STRING
WHEN d.chain_id IS NULL THEN destination_chain_id :: STRING
ELSE d.chain_id :: STRING
END AS destination_chain_id,
CASE
WHEN platform IN (
'stargate',
'wormhole',
'meson'
) THEN LOWER(destination_chain)
WHEN d.chain IS NULL THEN LOWER(destination_chain)
ELSE LOWER(
d.chain
)
END AS destination_chain,
b.token_address,
CASE
WHEN platform = 'axelar' THEN COALESCE(
C.token_symbol,
b.token_symbol
)
ELSE C.token_symbol
END AS token_symbol,
C.token_decimals AS token_decimals,
amount_unadj,
CASE
WHEN C.token_decimals IS NOT NULL THEN (amount_unadj / pow(10, C.token_decimals))
ELSE amount_unadj
END AS amount,
CASE
WHEN C.token_decimals IS NOT NULL THEN ROUND(
amount * p.price,
2
)
ELSE NULL
END AS amount_usd_unadj,
_id,
b._inserted_timestamp
FROM
all_protocols b
LEFT JOIN {{ ref('silver__contracts') }} C
ON b.token_address = C.contract_address
LEFT JOIN {{ ref('price__ez_hourly_token_prices') }}
p
ON b.token_address = p.token_address
AND DATE_TRUNC(
'hour',
block_timestamp
) = p.hour
LEFT JOIN {{ source(
'external_gold_defillama',
'dim_chains'
) }}
d
ON d.chain_id :: STRING = b.destination_chain_id :: STRING
OR LOWER(
d.chain
) = LOWER(
b.destination_chain
)
)
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id,
destination_chain,
token_address,
token_symbol,
token_decimals,
amount_unadj,
amount,
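-- null out implausibly large USD amounts (>= 1e15), which typically indicate a bad price or decimal value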
CASE
WHEN amount_usd_unadj < 1e+15 THEN amount_usd_unadj
ELSE NULL
END AS amount_usd,
_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['_id']
) }} AS complete_bridge_activity_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL
WHERE destination_chain <> 'polygon'
qualify (ROW_NUMBER() over (PARTITION BY _id
ORDER BY
_inserted_timestamp DESC)) = 1


@ -0,0 +1,81 @@
version: 2
models:
- name: silver_bridge__complete_bridge_activity
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null:
where: VERSION <> 'v1-native' AND PLATFORM NOT IN ('wormhole','meson')
- name: EVENT_NAME
tests:
- not_null:
where: VERSION <> 'v1-native' AND PLATFORM NOT IN ('wormhole','meson')
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: PLATFORM
tests:
- not_null
- name: VERSION
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,124 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
tags = ['curated']
) }}
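-- pools deployed by the Stargate factory address filtered below; each pool's underlying token is read via eth_call further down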
WITH base_contracts AS (
SELECT
tx_hash,
block_number,
block_timestamp,
from_address,
to_address AS contract_address,
_call_id,
_inserted_timestamp
FROM
{{ ref('silver__traces') }}
WHERE
from_address = LOWER('0x808d7c71ad2ba3FA531b068a2417C63106BC0949')
AND TYPE ILIKE 'create%'
AND tx_status ILIKE 'success'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND to_address NOT IN (
SELECT
DISTINCT pool_address
FROM
{{ this }}
)
{% endif %}
),
function_sigs AS (
SELECT
'0xfc0c546a' AS function_sig,
'token' AS function_name
),
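-- calldata for the token() read: the 4-byte selector plus a zero-padded 32-byte word (token() takes no arguments, so the padding is inert)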
inputs AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
0 AS function_input,
CONCAT(
function_sig,
LPAD(
function_input,
64,
0
)
) AS DATA
FROM
base_contracts
JOIN function_sigs
ON 1 = 1
),
contract_reads AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
function_input,
DATA,
utils.udf_json_rpc_call(
'eth_call',
[{ 'to': contract_address, 'from': null, 'data': data }, utils.udf_int_to_hex(block_number) ]
) AS rpc_request,
live.udf_api(
node_url,
rpc_request
) AS read_output,
SYSDATE() AS _inserted_timestamp
FROM
inputs
JOIN {{ source(
'streamline_crosschain',
'node_mapping'
) }}
ON 1 = 1
AND chain = 'polygon'
),
reads_flat AS (
SELECT
read_output,
read_output :data :id :: STRING AS read_id,
read_output :data :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
function_sig,
function_name,
function_input,
DATA,
contract_address,
block_number,
_inserted_timestamp
FROM
contract_reads
)
SELECT
read_output,
read_id,
read_result,
read_id_object,
function_sig,
function_name,
function_input,
DATA,
block_number,
contract_address AS pool_address,
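-- read_result is a 32-byte ABI-encoded address; keep the rightmost 20 bytes (40 hex chars)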
CONCAT('0x', SUBSTR(read_result, 27, 40)) AS token_address,
_inserted_timestamp
FROM
reads_flat


@ -0,0 +1,16 @@
version: 2
models:
- name: silver_bridge__stargate_createpool
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -0,0 +1,111 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH pools AS (
SELECT
pool_address,
LOWER(token_address) AS token_address
FROM
{{ ref('silver_bridge__stargate_createpool') }}
),
base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'stargate' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amountSD" :: STRING
) AS amountSD,
TRY_TO_NUMBER(
decoded_flat :"chainId" :: STRING
) AS chainId,
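-- shift legacy chain ids (< 100) into the 1xx range so they line up with the stargate chain id seed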
CASE
WHEN chainId < 100 THEN chainId + 100
ELSE chainId
END AS destination_chain_id,
TRY_TO_NUMBER(
decoded_flat :"dstPoolId" :: STRING
) AS dstPoolId,
TRY_TO_NUMBER(
decoded_flat :"eqFee" :: STRING
) AS eqFee,
TRY_TO_NUMBER(
decoded_flat :"eqReward" :: STRING
) AS eqReward,
TRY_TO_NUMBER(
decoded_flat :"lpFee" :: STRING
) AS lpFee,
TRY_TO_NUMBER(
decoded_flat :"protocolFee" :: STRING
) AS protocolFee,
decoded_flat :"from" :: STRING AS from_address,
decoded_flat,
token_address,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
d
INNER JOIN pools p
ON d.contract_address = p.pool_address
WHERE
topics [0] :: STRING = '0x34660fc8af304464529f48a778e03d03e4d34bcd5f9b6f0cfbf3cd238c642f7f'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
LOWER('0x9d1B1669c73b033DFe47ae5a0164Ab96df25B944') AS bridge_address,
NAME AS platform,
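-- the Swap event does not emit a destination recipient, so the sender is used as a proxy for receiver and destination_chain_receiver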
from_address AS sender,
from_address AS receiver,
receiver AS destination_chain_receiver,
amountSD AS amount_unadj,
destination_chain_id,
LOWER(chain_name) AS destination_chain,
dstPoolId AS destination_pool_id,
eqFee AS fee,
eqReward AS reward,
lpFee AS lp_fee,
protocolFee AS protocol_fee,
token_address,
_log_id,
_inserted_timestamp
FROM
base_evt b
LEFT JOIN {{ ref('silver_bridge__stargate_chain_id_seed') }}
s
ON b.destination_chain_id :: STRING = s.chain_id :: STRING


@ -0,0 +1,74 @@
version: 2
models:
- name: silver_bridge__stargate_swap
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,82 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'symbiosis' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_flat :"chainID" :: STRING
) AS chainID,
decoded_flat :"from" :: STRING AS from_address,
decoded_flat :"id" :: STRING AS id,
decoded_flat :"revertableAddress" :: STRING AS revertableAddress,
decoded_flat :"to" :: STRING AS to_address,
decoded_flat :"token" :: STRING AS token,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING = '0x31325fe0a1a2e6a5b1e41572156ba5b4e94f0fae7e7f63ec21e9b5ce1e4b3eab'
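-- SynthesizeRequest event signature (per the model name)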
AND contract_address IN (
'0xb8f275fbf7a959f4bce59999a2ef122a099e81a8',
'0x3338be49a5f60e2593337919f9ad7098e9a7dd7e'
)
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
amount,
chainID AS destination_chain_id,
id,
revertableAddress AS revertable_address,
token AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt


@ -0,0 +1,74 @@
version: 2
models:
- name: silver_bridge__symbiosis_synthesizerequest
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,85 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'synapse' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_flat :"chainId" :: STRING
) AS chainId,
decoded_flat :"to" :: STRING AS to_address,
decoded_flat :"token" :: STRING AS token,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING IN (
'0xdc5bad4651c5fbe9977a696aadc65996c468cde1448dd468ec0d83bf61c4b57c',
--redeem
'0xda5273705dbef4bf1b902a131c2eac086b7e1476a8ab0cb4da08af1fe1bd8e3b' --deposit
)
AND contract_address IN (
'0x8f5bbb2bb8c2ee94639e55d5f41de9b4839c1280',
'0x2119a5c9279a13ec0de5e30d572b316f1cfca567',
'0x0efc29e196da2e81afe96edd041bedcdf9e74893',
'0x5f06745ee8a2001198a379bafbd0361475f3cfc3',
'0x7103a324f423b8a4d4cc1c4f2d5b374af4f0bab5'
)
AND origin_to_address IS NOT NULL
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
amount,
origin_from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
chainId AS destination_chain_id,
token AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt


@ -0,0 +1,74 @@
version: 2
models:
- name: silver_bridge__synapse_token_bridge
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,93 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'synapse' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_flat :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_flat :"chainId" :: STRING
) AS chainId,
TRY_TO_TIMESTAMP(
decoded_flat :"deadline" :: STRING
) AS deadline,
TRY_TO_NUMBER(
decoded_flat :"minDy" :: STRING
) AS minDy,
decoded_flat :"to" :: STRING AS to_address,
decoded_flat :"token" :: STRING AS token,
TRY_TO_NUMBER(
decoded_flat :"tokenIndexFrom" :: STRING
) AS tokenIndexFrom,
TRY_TO_NUMBER(
decoded_flat :"tokenIndexTo" :: STRING
) AS tokenIndexTo,
decoded_flat,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver__decoded_logs') }}
WHERE
topics [0] :: STRING = '0x91f25e9be0134ec851830e0e76dc71e06f9dade75a9b84e9524071dbbc319425'
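-- deposit/redeem-and-swap style event; decoded fields include tokenIndexFrom/To, minDy and deadline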
AND contract_address IN (
'0x8f5bbb2bb8c2ee94639e55d5f41de9b4839c1280',
'0x0efc29e196da2e81afe96edd041bedcdf9e74893'
)
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
origin_from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
amount,
chainId AS destination_chain_id,
token AS token_address,
deadline,
minDy AS min_dy,
tokenIndexFrom AS token_index_from,
tokenIndexTo AS token_index_to,
_log_id,
_inserted_timestamp
FROM
base_evt


@ -0,0 +1,74 @@
version: 2
models:
- name: silver_bridge__synapse_tokenbridgeandswap
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -0,0 +1,244 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['curated','reorg']
) }}
WITH token_transfers AS (
SELECT
tr.block_number,
tr.block_timestamp,
tr.origin_function_signature,
tr.origin_from_address,
tr.origin_to_address,
tr.tx_hash,
event_index,
tr.contract_address,
tr.from_address,
tr.to_address,
raw_amount,
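-- drop the 4-byte selector and split the remaining calldata into 32-byte (64 hex char) words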
regexp_substr_all(SUBSTR(input_data, 11, len(input_data)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS destination_chain_id,
CONCAT(
'0x',
segmented_data [3] :: STRING
) AS recipient1,
CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS recipient2,
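-- a recipient word left-padded with >= 24 zero chars is a standard EVM address; otherwise keep the full 32-byte value (non-EVM destination)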
LENGTH(
REGEXP_SUBSTR(
segmented_data [3] :: STRING,
'^(0*)'
)
) AS len,
CASE
WHEN len >= 24 THEN recipient2
ELSE recipient1
END AS destination_recipient_address,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS token,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [1] :: STRING)) AS amount,
utils.udf_hex_to_int(
segmented_data [4] :: STRING
) AS arbiterFee,
utils.udf_hex_to_int(
segmented_data [5] :: STRING
) AS nonce,
_log_id,
tr._inserted_timestamp
FROM
{{ ref('silver__transfers') }}
tr
INNER JOIN {{ ref('silver__transactions') }}
tx
ON tr.block_number = tx.block_number
AND tr.tx_hash = tx.tx_hash
WHERE
tr.from_address <> '0x0000000000000000000000000000000000000000'
AND tr.to_address = LOWER('0x5a58505a96D1dbf8dF91cB21B54419FC36e93fdE')
AND tr.origin_function_signature = '0x0f5287b0' -- tokenTransfer
AND destination_chain_id <> 0
{% if is_incremental() %}
AND tr._inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
native_transfers AS (
SELECT
et.block_number,
et.block_timestamp,
et.tx_hash,
tx.from_address AS origin_from_address,
tx.to_address AS origin_to_address,
tx.origin_function_signature,
et.from_address,
et.to_address,
amount_precise_raw,
identifier,
regexp_substr_all(SUBSTR(input_data, 11, len(input_data)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS destination_chain_id,
CONCAT(
'0x',
segmented_data [1] :: STRING
) AS recipient1,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS recipient2,
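-- same leading-zero check as in token_transfers to separate padded EVM addresses from full 32-byte recipients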
LENGTH(
REGEXP_SUBSTR(
segmented_data [1] :: STRING,
'^(0*)'
)
) AS len,
CASE
WHEN len >= 24 THEN recipient2
ELSE recipient1
END AS destination_recipient_address,
utils.udf_hex_to_int(
segmented_data [2] :: STRING
) AS arbiterFee,
utils.udf_hex_to_int(
segmented_data [3] :: STRING
) AS nonce,
_call_id,
et._inserted_timestamp
FROM
{{ ref('silver__native_transfers') }}
et
INNER JOIN {{ ref('silver__transactions') }}
tx
ON et.block_number = tx.block_number
AND et.tx_hash = tx.tx_hash
WHERE
et.to_address = LOWER('0x5a58505a96D1dbf8dF91cB21B54419FC36e93fdE')
AND tx.origin_function_signature = '0x9981509f' -- wrapAndTransfer
AND destination_chain_id <> 0
{% if is_incremental() %}
AND et._inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
all_transfers AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
'Transfer' AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
raw_amount AS amount_unadj,
destination_chain_id,
contract_address AS token_address,
destination_recipient_address,
{{ dbt_utils.generate_surrogate_key(
['_log_id']
) }} AS _id,
_inserted_timestamp
FROM
token_transfers
UNION ALL
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
NULL AS event_index,
NULL AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
amount_precise_raw AS amount_unadj,
destination_chain_id,
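-- wrapAndTransfer wraps native MATIC, so attribute the transfer to the WMATIC token address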
'0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270' AS token_address,
destination_recipient_address,
{{ dbt_utils.generate_surrogate_key(
['_call_id']
) }} AS _id,
_inserted_timestamp
FROM
native_transfers
),
base_near AS (
SELECT
near_address,
addr_encoded
FROM
{{ source(
'crosschain_silver',
'near_address_encoded'
) }}
)
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
event_name,
'wormhole' AS platform,
bridge_address,
sender,
receiver,
amount_unadj,
destination_chain_id,
chain_name AS destination_chain,
token_address,
destination_recipient_address,
--hex address on the destination chain, requires decoding for non-EVM - more info: https://docs.wormhole.com/wormhole/blockchain-environments/environments
CASE
WHEN destination_chain = 'solana' THEN utils.udf_hex_to_base58(destination_recipient_address)
WHEN destination_chain IN ('injective','sei')
THEN utils.udf_hex_to_bech32(destination_recipient_address,SUBSTR(destination_chain,1,3))
WHEN destination_chain IN ('osmosis','xpla')
THEN utils.udf_hex_to_bech32(destination_recipient_address,SUBSTR(destination_chain,1,4))
WHEN destination_chain IN ('terra','terra2','evmos')
THEN utils.udf_hex_to_bech32(destination_recipient_address,SUBSTR(destination_chain,1,5))
WHEN destination_chain IN ('cosmoshub','kujira')
THEN utils.udf_hex_to_bech32(destination_recipient_address,SUBSTR(destination_chain,1,6))
WHEN destination_chain IN ('near')
THEN near_address
WHEN destination_chain IN ('algorand')
THEN utils.udf_hex_to_algorand(destination_recipient_address)
WHEN destination_chain IN ('polygon')
THEN SUBSTR(destination_recipient_address,1,42)
ELSE destination_recipient_address
END AS destination_chain_receiver,
_id,
_inserted_timestamp
FROM
all_transfers t
LEFT JOIN {{ ref('silver_bridge__wormhole_chain_id_seed') }}
s
ON t.destination_chain_id :: STRING = s.wormhole_chain_id :: STRING
LEFT JOIN base_near n
ON t.destination_recipient_address = n.addr_encoded
WHERE
origin_to_address IS NOT NULL qualify (ROW_NUMBER() over (PARTITION BY _id
ORDER BY
_inserted_timestamp DESC)) = 1


@ -0,0 +1,52 @@
version: 2
models:
- name: silver_bridge__wormhole_transfers
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_ADDRESS
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3

Some files were not shown because too many files have changed in this diff.