AN-6265/bsc-consolidate (#369)

* remove models

* space

* 2
This commit is contained in:
drethereum 2025-07-22 10:23:51 -06:00 committed by GitHub
parent b13251d278
commit bc378e5997
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
127 changed files with 4 additions and 14249 deletions

View File

@ -1,12 +0,0 @@
chain,chain_id
ethereum,1
bsc,2
tron,3
solana,4
polygon,5
arbitrum,6
stellar,7
avalanche,8
base,9
optimism,10
celo,11
1 chain chain_id
2 ethereum 1
3 bsc 2
4 tron 3
5 solana 4
6 polygon 5
7 arbitrum 6
8 stellar 7
9 avalanche 8
10 base 9
11 optimism 10
12 celo 11

View File

@ -1,75 +0,0 @@
chain_name,chain_selector
apechain,14894068710063348487
arbitrum,4949039107694359620
l3x,3162193654116181371
treasure,1010349088906777999
areon,1939936305787790600
astar-zkevm,1540201334317828111
avalanche,6433500567565415381
dexalot,5463201557265485081
base,15971525489660198786
berachain,1294465214383781161
bsc,11344663589394136015
opbnb,465944652040885897
bitcichain,4874388048629246000
bitlayer,7937294810946806131
bittorrent,3776006016387883143
blast,4411394078118774322
bob,3849287863852499584
botanix,4560701533377838164
bsquared,5406759801798337480
celo,1346049177634351622
coinex_smart_chain,1761333065194157300
core,1224752112135636129
corn,9043146809313071210
cronos,1456215246176062136
cronos-zkevm,8788096068760390840
ethereum,5009297550715157269
fantom,3768048213127883732
filecoin,4561443241176882990
fraxtal,1462016016387883143
gnosis,465200170687744372
hashkey,7613811247471741961
hedera,3229138320728879060
immutable-zkevm,1237925231416731909
ink,3461204551265785888
kava,7550000543357438061
kroma,3719320017875267166
moonriver,1355020143337428062
lens,5608378062013572713
linea,4627098889531055414
mantle,1556008542357238666
merlin,241851231317828981
metis,8805746078405598895
mind,11690709103138290329
mode,7264351850409363825
morph,18164309074156128038
near,2039744413822257700
neonlink,8239338020728974000
optimism,3734403246176062136
plume,3208172210661564830
astar-polkadot,6422105447186081193
centrifuge,8175830712062617656
darwinia,8866418665544333000
moonbeam,1252863800116739621
polygon,4051577828743386545
polygon-zkevm,4348158687435793198
private-testnet-mica,4489326297382772450
ronin,6916147374840168594
rootstock,11964252391146578476
scroll,13204309965629103672
sei,9027416829622342829
shibarium,3993510008929295315
soneium,12505351618335765396
sonic,1673871237479749969
taiko,16468599424800719238
telos-evm,1477345371608778000
treasure,5214452172935136222
unichain,1923510103922296319
velas,374210358663784372
wemix,5142893604156789321
worldchain,2049429975587534727
xlayer,3016212468291539606
zircuit,17198166215261833993
zklink_nova,4350319965322101699
zksync,1562403441176082196
1 chain_name chain_selector
2 apechain 14894068710063348487
3 arbitrum 4949039107694359620
4 l3x 3162193654116181371
5 treasure 1010349088906777999
6 areon 1939936305787790600
7 astar-zkevm 1540201334317828111
8 avalanche 6433500567565415381
9 dexalot 5463201557265485081
10 base 15971525489660198786
11 berachain 1294465214383781161
12 bsc 11344663589394136015
13 opbnb 465944652040885897
14 bitcichain 4874388048629246000
15 bitlayer 7937294810946806131
16 bittorrent 3776006016387883143
17 blast 4411394078118774322
18 bob 3849287863852499584
19 botanix 4560701533377838164
20 bsquared 5406759801798337480
21 celo 1346049177634351622
22 coinex_smart_chain 1761333065194157300
23 core 1224752112135636129
24 corn 9043146809313071210
25 cronos 1456215246176062136
26 cronos-zkevm 8788096068760390840
27 ethereum 5009297550715157269
28 fantom 3768048213127883732
29 filecoin 4561443241176882990
30 fraxtal 1462016016387883143
31 gnosis 465200170687744372
32 hashkey 7613811247471741961
33 hedera 3229138320728879060
34 immutable-zkevm 1237925231416731909
35 ink 3461204551265785888
36 kava 7550000543357438061
37 kroma 3719320017875267166
38 moonriver 1355020143337428062
39 lens 5608378062013572713
40 linea 4627098889531055414
41 mantle 1556008542357238666
42 merlin 241851231317828981
43 metis 8805746078405598895
44 mind 11690709103138290329
45 mode 7264351850409363825
46 morph 18164309074156128038
47 near 2039744413822257700
48 neonlink 8239338020728974000
49 optimism 3734403246176062136
50 plume 3208172210661564830
51 astar-polkadot 6422105447186081193
52 centrifuge 8175830712062617656
53 darwinia 8866418665544333000
54 moonbeam 1252863800116739621
55 polygon 4051577828743386545
56 polygon-zkevm 4348158687435793198
57 private-testnet-mica 4489326297382772450
58 ronin 6916147374840168594
59 rootstock 11964252391146578476
60 scroll 13204309965629103672
61 sei 9027416829622342829
62 shibarium 3993510008929295315
63 soneium 12505351618335765396
64 sonic 1673871237479749969
65 taiko 16468599424800719238
66 telos-evm 1477345371608778000
67 treasure 5214452172935136222
68 unichain 1923510103922296319
69 velas 374210358663784372
70 wemix 5142893604156789321
71 worldchain 2049429975587534727
72 xlayer 3016212468291539606
73 zircuit 17198166215261833993
74 zklink_nova 4350319965322101699
75 zksync 1562403441176082196

View File

@ -1,81 +0,0 @@
chain,short_coin_type
ailayer,0x0a59
ancient8,0xaaaa
Aptos,0x027d
Arbitrum,0x2329
Aurora,0x0a0a
Avalanche,0x2328
b2,0x00df
Base,0x2105
bb,0x1771
Beam,0x0504
Berachain,0x38de
bevm2,0x2ced
Bitcoin,0x0000
Bitlayer,0x10c5
Blast,0x1331
BNB Chain,0x02ca
Bob,0xed88
Celo,0xce10
ckb,0x0135
Coinweb,0x08ae
Conflux,0x01f7
Core,0x045c
corn,0x6f40
Crypto Chain,0x018a
duck,0x15a9
EOS,0x00c2
Ethereum,0x003c
Evmos,0x11bc
exsat,0x1c20
Fantom,0x03ef
Flow,0x021b
Gnosis,0x02bc
Harmony,0x03ff
hash,0x03ee
hemi,0xa867
inevm,0x09dd
iotex,0x1251
Kaia,0x2019
Kroma,0x00ff
Linea,0xe708
Lisk,0x046f
Manta,0x0263
Mantle,0x1388
Map,0x58f8
Merlin,0x6868
Metis,0x0440
Mode,0x868b
Moonbeam,0x0504
Moonriver,0x0505
Morph,0x0b02
Nautilus,0x56ce
Near,0x018d
Neox,0xba93
opBNB,0x00cc
Optimism,0x0266
Polygon POS,0x03c6
Polygon zkEVM,0x044d
rsk,0x001e
Scroll,0x2750
Sei,0x0531
SKALE Calypso,0x6c62
SKALE Europa,0x9296
SKALE Nebula,0xb4b1
Solana,0x01f5
Soneium,0x074c
Sonic,0x0092
Sui,0x0310
Taiko,0x28c5
Taker,0x0465
Terra,0x014a
Thorchain,0x03a3
Thunder,0x006c
Ton,0x025f
Tron,0x00c3
xlayer,0x00c4
zeta,0x1b58
zkFair,0xa70e
zklink,0x5cc4
zircuit,0xbf04
zkSync Era,0x0324
1 chain short_coin_type
2 ailayer 0x0a59
3 ancient8 0xaaaa
4 Aptos 0x027d
5 Arbitrum 0x2329
6 Aurora 0x0a0a
7 Avalanche 0x2328
8 b2 0x00df
9 Base 0x2105
10 bb 0x1771
11 Beam 0x0504
12 Berachain 0x38de
13 bevm2 0x2ced
14 Bitcoin 0x0000
15 Bitlayer 0x10c5
16 Blast 0x1331
17 BNB Chain 0x02ca
18 Bob 0xed88
19 Celo 0xce10
20 ckb 0x0135
21 Coinweb 0x08ae
22 Conflux 0x01f7
23 Core 0x045c
24 corn 0x6f40
25 Crypto Chain 0x018a
26 duck 0x15a9
27 EOS 0x00c2
28 Ethereum 0x003c
29 Evmos 0x11bc
30 exsat 0x1c20
31 Fantom 0x03ef
32 Flow 0x021b
33 Gnosis 0x02bc
34 Harmony 0x03ff
35 hash 0x03ee
36 hemi 0xa867
37 inevm 0x09dd
38 iotex 0x1251
39 Kaia 0x2019
40 Kroma 0x00ff
41 Linea 0xe708
42 Lisk 0x046f
43 Manta 0x0263
44 Mantle 0x1388
45 Map 0x58f8
46 Merlin 0x6868
47 Metis 0x0440
48 Mode 0x868b
49 Moonbeam 0x0504
50 Moonriver 0x0505
51 Morph 0x0b02
52 Nautilus 0x56ce
53 Near 0x018d
54 Neox 0xba93
55 opBNB 0x00cc
56 Optimism 0x0266
57 Polygon POS 0x03c6
58 Polygon zkEVM 0x044d
59 rsk 0x001e
60 Scroll 0x2750
61 Sei 0x0531
62 SKALE Calypso 0x6c62
63 SKALE Europa 0x9296
64 SKALE Nebula 0xb4b1
65 Solana 0x01f5
66 Soneium 0x074c
67 Sonic 0x0092
68 Sui 0x0310
69 Taiko 0x28c5
70 Taker 0x0465
71 Terra 0x014a
72 Thorchain 0x03a3
73 Thunder 0x006c
74 Ton 0x025f
75 Tron 0x00c3
76 xlayer 0x00c4
77 zeta 0x1b58
78 zkFair 0xa70e
79 zklink 0x5cc4
80 zircuit 0xbf04
81 zkSync Era 0x0324

View File

@ -1,98 +0,0 @@
destination_chain,standard_destination_chain
acala,acala
algorand,algorand
aptos,aptos
arbitrum,arbitrum
arbitrum nova,arbitrum nova
arbitrum one,arbitrum
archway,archway
astar,astar
aurora,aurora
aurora mainnet,aurora
avalanche,avalanche
avalanche c-chain,avalanche
base,base
bnb,bsc
bnb chain,bsc
bnb smart chain mainnet,bsc
boba bnb mainnet,boba
boba network,boba
bsc,bsc
canto,canto
carbon,carbon
celo,celo
celo mainnet,celo
coinweb,coinweb
conflux,conflux
conflux espace,conflux
crab network,crab
crescent,crescent
cronos mainnet,cronos
crypto chain,crypto
dfk chain,dfk
dogechain mainnet,dogechain
eos,eos
ethereum,ethereum
ethereum mainnet,ethereum
evmos,evmos
fantom,fantom
fantom opera,fantom
filecoin,filecoin
fuse,fuse
gnosis,gnosis
harmony mainnet shard 0,harmony
huobi eco chain mainnet,huobi eco
injective,injective
juno,juno
karura,karura
kava,kava
klaytn,klaytn
klaytn mainnet cypress,klaytn
kujira,kujira
linea,linea
manta,manta
mantle,mantle
metis,metis
metis andromeda mainnet,metis
moonbeam,moonbeam
moonriver,moonriver
nautilus,nautilus
near,near
neutron,neutron
oasis,oasis
okxchain mainnet,okxchain
ontology mainnet,ontology
op mainnet,optimism
opbnb,opbnb
optimism,optimism
osmosis,osmosis
polygon,polygon
polygon mainnet,polygon
polygon pos,polygon
polygon zkevm,polygon zkevm
ronin,ronin
scroll,scroll
secret-snip,secret
sei,sei
skale europa,skale europa
skale nebula,skale nebula
solana,solana
stargaze,stargaze
starknet,starknet
sui,sui
telos evm mainnet,telos
terra,terra
terra-2,terra2
terra2,terra2
tezos,tezos
tron,tron
umee,umee
waves,waves
xpla,xpla
xrpl,xrpl
zkfair,zkfair
zksync era,zksync era
zksync era mainnet,zksync era
zksync lite,zksync lite
zora,zora
zzz,zzz
1 destination_chain standard_destination_chain
2 acala acala
3 algorand algorand
4 aptos aptos
5 arbitrum arbitrum
6 arbitrum nova arbitrum nova
7 arbitrum one arbitrum
8 archway archway
9 astar astar
10 aurora aurora
11 aurora mainnet aurora
12 avalanche avalanche
13 avalanche c-chain avalanche
14 base base
15 bnb bsc
16 bnb chain bsc
17 bnb smart chain mainnet bsc
18 boba bnb mainnet boba
19 boba network boba
20 bsc bsc
21 canto canto
22 carbon carbon
23 celo celo
24 celo mainnet celo
25 coinweb coinweb
26 conflux conflux
27 conflux espace conflux
28 crab network crab
29 crescent crescent
30 cronos mainnet cronos
31 crypto chain crypto
32 dfk chain dfk
33 dogechain mainnet dogechain
34 eos eos
35 ethereum ethereum
36 ethereum mainnet ethereum
37 evmos evmos
38 fantom fantom
39 fantom opera fantom
40 filecoin filecoin
41 fuse fuse
42 gnosis gnosis
43 harmony mainnet shard 0 harmony
44 huobi eco chain mainnet huobi eco
45 injective injective
46 juno juno
47 karura karura
48 kava kava
49 klaytn klaytn
50 klaytn mainnet cypress klaytn
51 kujira kujira
52 linea linea
53 manta manta
54 mantle mantle
55 metis metis
56 metis andromeda mainnet metis
57 moonbeam moonbeam
58 moonriver moonriver
59 nautilus nautilus
60 near near
61 neutron neutron
62 oasis oasis
63 okxchain mainnet okxchain
64 ontology mainnet ontology
65 op mainnet optimism
66 opbnb opbnb
67 optimism optimism
68 osmosis osmosis
69 polygon polygon
70 polygon mainnet polygon
71 polygon pos polygon
72 polygon zkevm polygon zkevm
73 ronin ronin
74 scroll scroll
75 secret-snip secret
76 sei sei
77 skale europa skale europa
78 skale nebula skale nebula
79 solana solana
80 stargaze stargaze
81 starknet starknet
82 sui sui
83 telos evm mainnet telos
84 terra terra
85 terra-2 terra2
86 terra2 terra2
87 tezos tezos
88 tron tron
89 umee umee
90 waves waves
91 xpla xpla
92 xrpl xrpl
93 zkfair zkfair
94 zksync era zksync era
95 zksync era mainnet zksync era
96 zksync lite zksync lite
97 zora zora
98 zzz zzz

View File

@ -1,13 +0,0 @@
chain_name,chain_id
Arbitrum,110
Avalanche,106
Base,184
BNB,102
Ethereum,101
Fantom,112
Kava,177
Linea,183
Mantle,181
Metis,151
Optimism,111
Polygon,109
1 chain_name chain_id
2 Arbitrum 110
3 Avalanche 106
4 Base 184
5 BNB 102
6 Ethereum 101
7 Fantom 112
8 Kava 177
9 Linea 183
10 Mantle 181
11 Metis 151
12 Optimism 111
13 Polygon 109

View File

@ -1,37 +0,0 @@
chain_name,wormhole_chain_id
acala,12
algorand,8
aptos,22
arbitrum,23
aurora,9
avalanche,6
avalanche,43114
base,30
bsc,4
celo,14
cosmoshub,4000
ethereum,2
evmos,4001
fantom,10
gnosis,25
injective,19
karura,11
klaytn,13
kujira,4002
moonbeam,16
near,15
neon,17
oasis,7
optimism,24
osmosis,20
polygon,5
polygon,137
pythnet,26
rootstock,33
sei,32
sepolia,10002
solana,1
sui,21
terra,3
terra2,18
xpla,28
1 chain_name wormhole_chain_id
2 acala 12
3 algorand 8
4 aptos 22
5 arbitrum 23
6 aurora 9
7 avalanche 6
8 avalanche 43114
9 base 30
10 bsc 4
11 celo 14
12 cosmoshub 4000
13 ethereum 2
14 evmos 4001
15 fantom 10
16 gnosis 25
17 injective 19
18 karura 11
19 klaytn 13
20 kujira 4002
21 moonbeam 16
22 near 15
23 neon 17
24 oasis 7
25 optimism 24
26 osmosis 20
27 polygon 5
28 polygon 137
29 pythnet 26
30 rootstock 33
31 sei 32
32 sepolia 10002
33 solana 1
34 sui 21
35 terra 3
36 terra2 18
37 xpla 28

View File

@ -106,6 +106,8 @@ models:
+enabled: false
bridge:
+enabled: true
dex:
+enabled: true
scores_package:
+enabled: true

View File

@ -1,42 +0,0 @@
-- Gold-layer dimension view: one row per DEX liquidity pool, sourced from the
-- silver complete-pools table. Renames creation metadata columns and backfills
-- the surrogate key and audit timestamps where the silver layer left them null.
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta={
'database_tags':{
'table': {
'PROTOCOL': 'BISWAP, DODO, FRAXSWAP, KYBERSWAP, PANCAKESWAP, SUSHISWAP, TRADER JOE, UNISWAP',
'PURPOSE': 'DEX, LIQUIDITY, POOLS, LP, SWAPS',
}
}
},
tags = ['gold','defi','dex','curated']
) }}
SELECT
block_number AS creation_block,
block_timestamp AS creation_time,
tx_hash AS creation_tx,
platform,
contract_address AS factory_address,
pool_address,
pool_name,
tokens,
symbols,
decimals,
-- Fall back to a hash of the pool address when the silver id is missing.
COALESCE (
complete_dex_liquidity_pools_id,
{{ dbt_utils.generate_surrogate_key(
['pool_address']
) }}
) AS dim_dex_liquidity_pools_id,
-- '2000-01-01' is a sentinel for rows loaded before audit columns existed.
COALESCE(
inserted_timestamp,
'2000-01-01'
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
'2000-01-01'
) AS modified_timestamp
FROM
{{ ref('silver_dex__complete_dex_liquidity_pools') }}

View File

@ -1,63 +0,0 @@
-- Gold-layer bridge activity view: one row per bridge transfer event across
-- the listed platforms. Destination chain labels are normalized against the
-- standard-chain seed, and implausibly large USD amounts are NULLed.
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta ={
'database_tags':{
'table':{
'PROTOCOL': 'ALLBRIDGE, AXELAR, CELER, CBRIDGE, DLN, DEBRIDGE, EYWA, MESON, STARGATE, SYMBIOSIS, SYNAPSE, WORMHOLE, CCIP',
'PURPOSE': 'BRIDGE'
} } },
tags = ['gold','defi','bridge','curated','ez']
) }}
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
sender,
receiver,
destination_chain_receiver,
-- Normalize free-form destination chain labels via the seed mapping;
-- keep the raw value when the seed has no entry for this variation.
COALESCE(
c.standardized_name,
b.destination_chain
) AS destination_chain,
destination_chain_id,
token_address,
token_symbol,
amount_unadj,
amount,
-- NULL out USD values at or above 1e15 — treated as price-feed outliers.
ROUND(
CASE
WHEN amount_usd < 1e+15 THEN amount_usd
ELSE NULL
END,
2
) AS amount_usd,
token_is_verified,
-- Fall back to a hash of _id when the silver id is missing.
COALESCE (
complete_bridge_activity_id,
{{ dbt_utils.generate_surrogate_key(
['_id']
) }}
) AS ez_bridge_activity_id,
-- '2000-01-01' is a sentinel for rows loaded before audit columns existed.
COALESCE(
inserted_timestamp,
'2000-01-01'
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
'2000-01-01'
) AS modified_timestamp
FROM
{{ ref('silver_bridge__complete_bridge_activity') }}
b
LEFT JOIN {{ ref('silver_bridge__standard_chain_seed') }} C
ON b.destination_chain = C.variation

View File

@ -1,81 +0,0 @@
-- Gold-layer DEX swaps view over silver_dex__complete_dex_swaps.
-- USD legs are sanity-checked: when the in/out USD values diverge by more
-- than 75% (and the token is not verified WBNB) the leg is NULLed.
--
-- FIX: parenthesized the verification predicate in both CASE expressions.
-- SQL gives AND higher precedence than OR, so the original
--   `token <> WBNB or not verified AND (divergence...)`
-- parsed as `token <> WBNB OR (NOT verified AND ...)`, which NULLed the USD
-- amount for every non-WBNB swap regardless of the divergence checks.
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta={
'database_tags':{
'table': {
'PROTOCOL': 'TRADER JOE, WOOFI, KYBERSWAP, PANCAKE SWAP, LEVEL, BISWAP, DODO, FRAX, HASHFLOW, UNISWAP, SUSHI',
'PURPOSE': 'DEX, SWAPS'
}
}
},
tags = ['gold','defi','dex','curated','ez']
) }}
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
pool_name,
event_name,
amount_in_unadj,
amount_in,
-- NULL out amount_in_usd when the two USD legs diverge by > 75%.
-- Verified WBNB (0xbb4c...095c) is trusted and never NULLed.
ROUND(
CASE
WHEN (token_in <> '0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c' or not token_in_is_verified)
AND (
amount_out_usd IS NULL
OR ABS((amount_in_usd - amount_out_usd) / NULLIF(amount_out_usd, 0)) > 0.75
OR ABS((amount_in_usd - amount_out_usd) / NULLIF(amount_in_usd, 0)) > 0.75
) THEN NULL
ELSE amount_in_usd
END,
2
) AS amount_in_usd,
amount_out_unadj,
amount_out,
-- Same heuristic applied to the out leg.
ROUND(
CASE
WHEN (token_out <> '0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c' or not token_out_is_verified)
AND (
amount_in_usd IS NULL
OR ABS((amount_out_usd - amount_in_usd) / NULLIF(amount_in_usd, 0)) > 0.75
OR ABS((amount_out_usd - amount_in_usd) / NULLIF(amount_out_usd, 0)) > 0.75
) THEN NULL
ELSE amount_out_usd
END,
2
) AS amount_out_usd,
sender,
tx_to,
event_index,
platform,
protocol,
version as protocol_version,
token_in,
token_in_is_verified,
token_out,
token_out_is_verified,
symbol_in,
symbol_out,
_log_id,
-- Fall back to a hash of (tx_hash, event_index) when the silver id is missing.
COALESCE (
complete_dex_swaps_id,
{{ dbt_utils.generate_surrogate_key(
['tx_hash','event_index']
) }}
) AS ez_dex_swaps_id,
-- '2000-01-01' is a sentinel for rows loaded before audit columns existed.
COALESCE(
inserted_timestamp,
'2000-01-01'
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
'2000-01-01'
) AS modified_timestamp
FROM {{ ref('silver_dex__complete_dex_swaps') }}

View File

@ -1,134 +0,0 @@
-- Silver bridge model: Across v3 deposit events decoded by hand from raw
-- event logs (FundsDeposited / V3FundsDeposited on the Across SpokePool
-- contract). Incremental with delete+insert keyed on block_number.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'across-v3' AS NAME,
event_index,
topic_0,
-- Map the event signature hash to a readable event name.
CASE
WHEN topic_0 = '0x32ed1a409ef04c7b0227189c3a103dc5ac10e775a15b785dcc510201f7c25ad3' THEN 'FundsDeposited'
WHEN topic_0 = '0xa123dc29aebf7d0c3322c8eeb5b999e859f39937950ed31056532713d0de396f' THEN 'V3FundsDeposited'
END AS event_name,
topics,
DATA,
-- Split the un-indexed event data (after the 0x prefix) into 64-hex-char
-- words; assumes standard 32-byte ABI padding — TODO confirm against ABI.
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
-- Indexed topics: destination chain id, deposit id, depositor address.
TRY_TO_NUMBER(
utils.udf_hex_to_int(
topic_1 :: STRING
)
) AS destinationChainId,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
topic_2 :: STRING
)
) AS depositId,
-- Addresses are the low 20 bytes (last 40 hex chars) of each padded word.
CONCAT('0x', SUBSTR(topic_3 :: STRING, 27, 40)) AS depositor,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS inputToken,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS outputToken,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS inputAmount,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS outputAmount,
-- Words 4-5 are unix timestamps; TRY_TO_TIMESTAMP yields NULL on overflow.
TRY_TO_TIMESTAMP(
utils.udf_hex_to_int(
segmented_data [4] :: STRING
)
) AS quoteTimestamp,
TRY_TO_TIMESTAMP(
utils.udf_hex_to_int(
segmented_data [5] :: STRING
)
) AS fillDeadline,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [6] :: STRING
)
) AS exclusivityDeadline,
CONCAT('0x', SUBSTR(segmented_data [7] :: STRING, 25, 40)) AS recipient,
CONCAT('0x', SUBSTR(segmented_data [8] :: STRING, 25, 40)) AS exclusiveRelayer,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [9] :: STRING
)
) AS relayerFeePct,
segmented_data [10] :: STRING AS message,
event_removed,
-- Deterministic per-event id used downstream as the uniqueness key.
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topic_0 IN (
'0x32ed1a409ef04c7b0227189c3a103dc5ac10e775a15b785dcc510201f7c25ad3',
'0xa123dc29aebf7d0c3322c8eeb5b999e859f39937950ed31056532713d0de396f'
)
AND contract_address = '0x4e8e101924ede233c13e2d8622dc8aed2872d505'
AND tx_succeeded
AND block_timestamp :: DATE >= '2025-04-01'
{% if is_incremental() %}
-- Incremental window: re-read 12h behind the high-water mark, capped at 7 days.
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
-- Final projection: rename decoded fields to the shared bridge-model schema.
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
contract_address AS bridge_address,
NAME AS platform,
depositor AS sender,
recipient AS receiver,
recipient AS destination_chain_receiver,
destinationChainId AS destination_chain_id,
inputAmount AS amount,
inputToken AS token_address,
depositId AS deposit_id,
message,
quoteTimestamp AS quote_timestamp,
relayerFeePct AS relayer_fee_pct,
exclusiveRelayer AS exclusive_relayer,
exclusivityDeadline AS exclusivity_deadline,
fillDeadline AS fill_deadline,
outputAmount AS output_amount,
outputToken AS output_token,
_log_id,
_inserted_timestamp
FROM
base_evt

View File

@ -1,71 +0,0 @@
# dbt schema tests for silver_bridge__across_v3fundsdeposited:
# row uniqueness on _LOG_ID, not-null checks on core columns, hex-format
# regex checks on address/hash columns, and a numeric type check on AMOUNT.
version: 2
models:
- name: silver_bridge__across_v3fundsdeposited
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
# NOTE(review): ORIGIN_TO_ADDRESS intentionally has no not_null test here
# (contract creations have a null to-address) — confirm with model owner.
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- not_null

View File

@ -1,157 +0,0 @@
-- Silver bridge model: Allbridge (classic) 'Sent' events from decoded logs.
-- Maps Allbridge's short chain symbols to standard chain names and re-encodes
-- the recipient address for the destination chain's address format.
-- Incremental with delete+insert keyed on block_number.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'allbridge' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
-- 'destination' is hex-encoded ASCII (e.g. 0x42534300... -> 'BSC').
utils.udf_hex_to_string(
SUBSTRING(
decoded_log :"destination" :: STRING,
3
)
) AS destination_chain_symbol,
decoded_log :"lockId" :: STRING AS lockId,
decoded_log :"recipient" :: STRING AS recipient,
decoded_log :"sender" :: STRING AS sender,
-- 'tokenSource' is hex-encoded ASCII like 'destination'.
utils.udf_hex_to_string(
SUBSTRING(
decoded_log :"tokenSource" :: STRING,
3
)
) AS token_source,
-- Strip trailing zero padding from the source token address.
-- NOTE(review): '0+$' also removes genuine trailing zeros of an address —
-- confirm this matches the intended normalization.
REGEXP_REPLACE(
decoded_log :"tokenSourceAddress" :: STRING,
'0+$',
''
) AS tokenSourceAddress,
decoded_log,
event_removed,
IFF(
tx_succeeded,
'SUCCESS',
'FAIL'
) AS tx_status,
-- Deterministic per-event id used downstream as the uniqueness key.
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x884a8def17f0d5bbb3fef53f3136b5320c9b39f75afb8985eeab9ea1153ee56d'
AND contract_address = '0xbbbd1bbb4f9b936c3604906d7592a644071de884'
AND tx_succeeded
{% if is_incremental() %}
-- Incremental window: re-read 12h behind the high-water mark, capped at 7 days.
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
sender,
recipient AS receiver,
amount,
lockId AS lock_id,
-- Map Allbridge chain symbols to the standardized chain-name variations
-- used by the downstream standard-chain seed.
CASE
WHEN destination_chain_symbol = 'AURO' THEN 'aurora mainnet'
WHEN destination_chain_symbol = 'AVA' THEN 'avalanche c-chain'
WHEN destination_chain_symbol = 'BSC' THEN 'bnb smart chain mainnet'
WHEN destination_chain_symbol = 'CELO' THEN 'celo mainnet'
WHEN destination_chain_symbol = 'ETH' THEN 'ethereum mainnet'
WHEN destination_chain_symbol = 'FTM' THEN 'fantom opera'
WHEN destination_chain_symbol = 'HECO' THEN 'huobi eco chain mainnet'
WHEN destination_chain_symbol = 'KLAY' THEN 'klaytn mainnet cypress'
WHEN destination_chain_symbol = 'POL' THEN 'polygon mainnet'
WHEN destination_chain_symbol = 'SOL' THEN 'solana'
WHEN destination_chain_symbol = 'TRA' THEN 'terra'
WHEN destination_chain_symbol = 'TEZ' THEN 'tezos'
WHEN destination_chain_symbol = 'WAVE' THEN 'waves'
ELSE LOWER(destination_chain_symbol)
END AS destination_chain,
-- Same symbol mapping applied to the token's source chain.
CASE
WHEN token_source = 'AURO' THEN 'aurora mainnet'
WHEN token_source = 'AVA' THEN 'avalanche c-chain'
WHEN token_source = 'BSC' THEN 'bnb smart chain mainnet'
WHEN token_source = 'CELO' THEN 'celo mainnet'
WHEN token_source = 'ETH' THEN 'ethereum mainnet'
WHEN token_source = 'FTM' THEN 'fantom opera'
WHEN token_source = 'HECO' THEN 'huobi eco chain mainnet'
WHEN token_source = 'KLAY' THEN 'klaytn mainnet cypress'
WHEN token_source = 'POL' THEN 'polygon mainnet'
WHEN token_source = 'SOL' THEN 'solana'
WHEN token_source = 'TRA' THEN 'terra'
WHEN token_source = 'TEZ' THEN 'tezos'
WHEN token_source = 'WAVE' THEN 'waves'
ELSE LOWER(token_source)
END AS source_chain,
-- Re-encode the raw recipient bytes into the destination chain's native
-- address format (base58 for Solana/Waves, bech32 for Terra, tz1 for Tezos,
-- ASCII for NEAR, 20-byte hex for EVM chains; otherwise pass through).
CASE
WHEN destination_chain = 'solana' THEN utils.udf_hex_to_base58(recipient)
WHEN destination_chain = 'waves' THEN utils.udf_hex_to_base58(SUBSTR(recipient,1,54))
WHEN destination_chain ILIKE 'terra%' THEN utils.udf_hex_to_bech32(recipient, SUBSTR(destination_chain, 1, 5))
WHEN destination_chain = 'tezos' THEN utils.udf_hex_to_tezos(CONCAT('0x', SUBSTR(recipient, 7, 40)), 'tz1')
WHEN destination_chain = 'near' THEN utils.udf_hex_to_string(SUBSTR(recipient,3))
WHEN destination_chain IN (
'aurora mainnet',
'avalanche c-chain',
'bnb smart chain mainnet',
'celo mainnet',
'fantom opera',
'fuse',
'huobi eco chain mainnet',
'klaytn mainnet cypress',
'polygon mainnet'
) THEN SUBSTR(
recipient,
1,
42
)
WHEN destination_chain = 'zzz' THEN origin_from_address
ELSE recipient
END AS destination_chain_receiver,
tokenSourceAddress AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt
WHERE
-- Keep only transfers whose token originates on BSC.
source_chain = 'bnb smart chain mainnet'

View File

@ -1,69 +0,0 @@
# dbt schema tests for silver_bridge__allbridge_sent:
# row uniqueness on _LOG_ID, not-null checks on core columns, and hex-format
# regex checks on address/hash columns; AMOUNT must be a numeric type.
version: 2
models:
- name: silver_bridge__allbridge_sent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,163 +0,0 @@
-- Silver bridge model: Allbridge core 'TokensSent' events, decoded by hand
-- from raw logs. The TokensSent event does not carry the token or pre-swap
-- amount, so those are taken from the LP pools' SwappedToVUsd events in the
-- same transaction. Incremental with delete+insert keyed on block_number.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
-- Bridge-level TokensSent events from the Allbridge core contract.
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'allbridge' AS platform,
event_index,
'TokensSent' AS event_name,
-- Split the un-indexed event data (after the 0x prefix) into 64-hex-char
-- words; assumes standard 32-byte ABI padding — TODO confirm against ABI.
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [0] :: STRING)) AS amount,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [2] :: STRING)) AS destinationChainId,
-- Sender/recipient are taken from the tx originator; the raw recipient
-- bytes are decoded separately into destination_chain_receiver below.
origin_from_address AS sender,
origin_from_address AS recipient,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [4] :: STRING)) AS nonce,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [5] :: STRING)) AS messenger,
IFF(
tx_succeeded,
'SUCCESS',
'FAIL'
) AS tx_status,
-- Deterministic per-event id used downstream as the uniqueness key.
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] = '0x9cd6008e8d4ebd34fd9d022278fec7f95d133780ecc1a0dea459fae3e9675390' --TokensSent
AND contract_address = '0x3c4fa639c8d7e65c603145adad8bd12f2358312f' --Allbridge
AND tx_succeeded
{% if is_incremental() %}
-- Incremental window: re-read 12h behind the high-water mark, capped at 7 days.
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
lp_evt AS (
-- LP-pool SwappedToVUsd events supply the sent token and pre-swap amount
-- that TokensSent itself does not expose.
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS sender,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS token,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [2] :: STRING)) AS amount,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [3] :: STRING)) AS vUsdAmount,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [4] :: STRING)) AS fee,
IFF(
tx_succeeded,
'SUCCESS',
'FAIL'
) AS tx_status,
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] = '0xa930da1d3f27a25892307dd59cec52dd9b881661a0f20364757f83a0da2f6873' --SwappedToVUsd
AND contract_address IN (
'0xf833afa46fcd100e62365a0fdb0734b7c4537811',
--USDT LP
'0x8033d5b454ee4758e4bd1d37a49009c1a81d8b10' --USDC LP
)
-- Only LP events that co-occur with a TokensSent in the same tx matter.
AND tx_hash IN (
SELECT
tx_hash
FROM
base_evt
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
s.block_number,
s.block_timestamp,
s.tx_hash,
s.origin_function_signature,
s.origin_from_address,
s.origin_to_address,
s.contract_address AS bridge_address,
s.event_index,
s.event_name,
s.platform,
lp.amount,
lp.token AS token_address,
s.sender,
s.recipient AS receiver,
-- Seed maps Allbridge numeric chain ids to chain names.
C.chain AS destination_chain,
s.destinationChainId AS destination_chain_id,
-- Decode the raw recipient word per destination VM: base58 for Solana,
-- raw hex word for Stellar, low 20 bytes for EVM chains.
CASE
WHEN C.chain = 'solana' THEN utils.udf_hex_to_base58(CONCAT('0x', s.segmented_data [1] :: STRING))
WHEN C.chain = 'stellar' THEN s.segmented_data [1] :: STRING
ELSE CONCAT(
'0x',
SUBSTR(
s.segmented_data [1] :: STRING,
25,
40
)
)
END AS destination_chain_receiver,
-- Same decoding for the destination-side token word.
CASE
WHEN C.chain = 'solana' THEN utils.udf_hex_to_base58(CONCAT('0x', s.segmented_data [3] :: STRING))
WHEN C.chain = 'stellar' THEN s.segmented_data [3] :: STRING
ELSE CONCAT(
'0x',
SUBSTR(
s.segmented_data [3] :: STRING,
25,
40
)
)
END AS destination_chain_token,
s.tx_status,
s._log_id,
s._inserted_timestamp
FROM
base_evt s
INNER JOIN lp_evt lp
ON s.tx_hash = lp.tx_hash
AND s.block_number = lp.block_number
LEFT JOIN {{ ref('silver_bridge__allbridge_chain_id_seed') }} C
-- Dedupe: a tx can emit multiple LP events; keep the latest row per event.
ON s.destinationChainId = C.chain_id qualify(ROW_NUMBER() over (PARTITION BY s._log_id
ORDER BY
s._inserted_timestamp DESC)) = 1

View File

@ -1,69 +0,0 @@
# dbt schema tests for silver_bridge__allbridge_tokens_sent:
# row uniqueness on _LOG_ID, not-null checks on core columns, and hex-format
# regex checks on address/hash columns; AMOUNT must be a numeric type.
version: 2
models:
- name: silver_bridge__allbridge_tokens_sent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,240 +0,0 @@
-- Axelar ContractCallWithToken bridge sends out of BSC.
-- Decodes gateway events, joins the matching ERC-20 transfer in the same tx to
-- recover the bridged token address, and normalizes Axelar chain names.
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_bridge','defi','bridge','curated']
) }}

WITH base_evt AS (
    -- ContractCallWithToken events from the gateway contract
    -- (0x304acf... — presumably the Axelar gateway on BSC; confirm).
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        'axelar' AS NAME,
        event_index,
        topics [0] :: STRING AS topic_0,
        event_name,
        TRY_TO_NUMBER(
            decoded_log :"amount" :: STRING
        ) AS amount,
        decoded_log :"destinationChain" :: STRING AS destinationChain,
        LOWER(
            decoded_log :"destinationContractAddress" :: STRING
        ) AS destinationContractAddress,
        decoded_log :"payload" :: STRING AS payload,
        -- the tx originator is treated as the bridge recipient
        origin_from_address AS recipient,
        decoded_log :"payloadHash" :: STRING AS payloadHash,
        decoded_log :"sender" :: STRING AS sender,
        decoded_log :"symbol" :: STRING AS symbol,
        decoded_log,
        event_removed,
        IFF(
            tx_succeeded,
            'SUCCESS',
            'FAIL'
        ) AS tx_status,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__ez_decoded_event_logs') }}
    WHERE
        -- ContractCallWithToken event signature
        topics [0] :: STRING = '0x7e50569d26be643bda7757722291ec66b1be66d8283474ae3fab5a98f878a7a2'
        AND contract_address = '0x304acf330bbe08d1e512eefaa92f6a57871fd895'
        AND tx_succeeded

{% if is_incremental() %}
        -- 12h lookback over the current high-water mark, capped at 7 days
        AND _inserted_timestamp >= (
            SELECT
                MAX(_inserted_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
native_gas_paid AS (
    -- Gas-service events (0x2d5d7d... — presumably Axelar gas service; confirm);
    -- only refundAddress is consumed downstream, as a fallback receiver.
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        'axelar' AS NAME,
        event_index,
        topics [0] :: STRING AS topic_0,
        event_name,
        TRY_TO_NUMBER(
            decoded_log :"amount" :: STRING
        ) AS amount,
        decoded_log :"destinationChain" :: STRING AS destinationChain,
        LOWER(
            decoded_log :"destinationAddress" :: STRING
        ) AS destinationAddress,
        TRY_TO_NUMBER(
            decoded_log :"gasFeeAmount" :: STRING
        ) AS gasFeeAmount,
        decoded_log :"payloadHash" :: STRING AS payloadHash,
        decoded_log :"refundAddress" :: STRING AS refundAddress,
        decoded_log :"sourceAddress" :: STRING AS sourceAddress,
        decoded_log :"symbol" :: STRING AS symbol,
        decoded_log,
        event_removed,
        IFF(
            tx_succeeded,
            'SUCCESS',
            'FAIL'
        ) AS tx_status,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__ez_decoded_event_logs') }}
    WHERE
        topics [0] :: STRING = '0x999d431b58761213cf53af96262b67a069cbd963499fd8effd1e21556217b841'
        AND contract_address = '0x2d5d7d31f671f86c782533cc367f14109a082712'
        AND tx_succeeded

{% if is_incremental() %}
        AND _inserted_timestamp >= (
            SELECT
                MAX(_inserted_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
transfers AS (
    -- ERC-20 transfers out of 0xce16f6... into the gateway or the burn
    -- address — used only to attribute a token_address to each send.
    -- NOTE(review): 0xce16f6... looks like an Axelar-managed depositor; verify.
    SELECT
        block_number,
        tx_hash,
        event_index,
        contract_address AS token_address,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__ez_token_transfers') }}
    WHERE
        from_address = '0xce16f69375520ab01377ce7b88f5ba8c48f8d666'
        AND to_address IN (
            '0x304acf330bbe08d1e512eefaa92f6a57871fd895',
            '0x0000000000000000000000000000000000000000'
        )

{% if is_incremental() %}
        AND _inserted_timestamp >= (
            SELECT
                MAX(_inserted_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
FINAL AS (
    -- NOTE(review): both joins are on (block_number, tx_hash) only, so a tx
    -- with multiple qualifying transfers fans out; the trailing QUALIFY keeps
    -- one arbitrary row per _log_id.
    SELECT
        b.block_number,
        b.block_timestamp,
        b.origin_function_signature,
        b.origin_from_address,
        b.origin_to_address,
        b.tx_hash,
        b.event_index,
        b.topic_0,
        b.event_name,
        b.event_removed,
        b.tx_status,
        b.contract_address AS bridge_address,
        b.name AS platform,
        b.origin_from_address AS sender,
        -- zero-address recipient falls back to the gas-service refund address
        CASE
            WHEN b.recipient = '0x0000000000000000000000000000000000000000' THEN refundAddress
            ELSE b.recipient
        END AS receiver,
        -- map Axelar's chain labels onto the project's standard chain names
        CASE
            WHEN LOWER(
                b.destinationChain
            ) = 'avalanche' THEN 'avalanche c-chain'
            WHEN LOWER(
                b.destinationChain
            ) = 'binance' THEN 'bnb smart chain mainnet'
            WHEN LOWER(
                b.destinationChain
            ) = 'celo' THEN 'celo mainnet'
            WHEN LOWER(
                b.destinationChain
            ) = 'ethereum' THEN 'ethereum mainnet'
            WHEN LOWER(
                b.destinationChain
            ) = 'fantom' THEN 'fantom opera'
            WHEN LOWER(
                b.destinationChain
            ) = 'polygon' THEN 'polygon mainnet'
            ELSE LOWER(
                b.destinationChain
            )
        END AS destination_chain,
        b.destinationContractAddress AS destination_contract_address,
        -- for the listed (EVM-style) chains the receiver address is usable
        -- as-is; otherwise fall back to the destination contract address
        CASE
            WHEN destination_chain IN (
                'arbitrum',
                'avalanche c-chain',
                'base',
                'bnb smart chain mainnet',
                'celo mainnet',
                'ethereum mainnet',
                'fantom opera',
                'filecoin',
                'kava',
                'linea',
                'mantle',
                'moonbeam',
                'optimism',
                'polygon mainnet',
                'scroll'
            ) THEN receiver
            ELSE destination_contract_address
        END AS destination_chain_receiver,
        b.amount,
        b.payload,
        b.payloadHash AS payload_hash,
        b.symbol AS token_symbol,
        t.token_address,
        b._log_id,
        b._inserted_timestamp
    FROM
        base_evt b
        INNER JOIN transfers t
        ON b.block_number = t.block_number
        AND b.tx_hash = t.tx_hash
        LEFT JOIN native_gas_paid n
        ON n.block_number = b.block_number
        AND n.tx_hash = b.tx_hash
)
SELECT
    *
FROM
    -- dedupe: keep the most recently inserted row per event
    FINAL qualify (ROW_NUMBER() over (PARTITION BY _log_id
ORDER BY
    _inserted_timestamp DESC)) = 1

View File

@ -1,70 +0,0 @@
# dbt schema tests for silver_bridge__axelar_contractcallwithtoken.
# Uniqueness on _LOG_ID plus not-null / 0x-hex format checks on bridge columns.
version: 2
models:
  - name: silver_bridge__axelar_contractcallwithtoken
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EVENT_INDEX
        tests:
          - not_null
      - name: EVENT_NAME
        tests:
          - not_null
      - name: BRIDGE_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SENDER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: RECEIVER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: DESTINATION_CHAIN_RECEIVER
        tests:
          - not_null
      - name: AMOUNT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - DECIMAL
                - FLOAT
                - NUMBER
      - name: TOKEN_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      # declared without tests (documentation/ordering only)
      - name: _INSERTED_TIMESTAMP

View File

@ -1,146 +0,0 @@
-- Chainlink CCIP "CCIPSendRequested" bridge sends on BSC.
-- Resolves onRamp contracts from router OnRampSet events, decodes send
-- messages from those onRamps, and explodes the per-message tokenAmounts
-- array into one row per bridged token.
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_bridge','defi','bridge','curated']
) }}

WITH on_ramp_set AS (
    -- OnRampSet events from the CCIP router: maps each destination chain
    -- selector to the onRamp contract address that serves it. The seed table
    -- translates selectors to human-readable chain names.
    SELECT
        block_timestamp,
        tx_hash,
        event_name,
        TRY_TO_NUMBER(
            decoded_log :destChainSelector :: STRING
        ) AS destChainSelector,
        chain_name,
        decoded_log :onRamp :: STRING AS onRampAddress,
        modified_timestamp
    FROM
        {{ ref('core__ez_decoded_event_logs') }}
        -- NOTE(review): join condition uses the select alias destChainSelector
        -- (Snowflake lateral alias resolution) — confirm this compiles as intended
        INNER JOIN {{ ref('silver_bridge__ccip_chain_seed') }}
        ON destChainSelector = chain_selector
    WHERE
        contract_address = LOWER('0x34B03Cb9086d7D758AC55af71584F81A598759FE') -- ccip router
        AND topic_0 = '0x1f7d0ec248b80e5c0dde0ee531c4fc8fdb6ce9a2b3d90f560c74acd6a7202f23' -- onrampset
        AND tx_succeeded
        AND event_removed = FALSE

{% if is_incremental() %}
        -- 12h lookback over the high-water mark, capped at 7 days
        AND modified_timestamp >= (
            SELECT
                MAX(modified_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
ccip_sent AS (
    -- CCIPSendRequested events emitted by the resolved onRamp contracts.
    SELECT
        l.block_number,
        l.block_timestamp,
        l.tx_hash,
        l.origin_function_signature,
        l.origin_from_address,
        l.origin_to_address,
        contract_address,
        l.event_name,
        l.event_index,
        -- split raw calldata (sans 0x) into 32-byte words
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        -- messageId lives in word 13 of the raw payload
        CONCAT(
            '0x',
            segmented_data [13] :: STRING
        ) AS message_id,
        l.decoded_log,
        decoded_log :message :feeToken :: STRING AS fee_token,
        TRY_TO_NUMBER(
            decoded_log :message :feeTokenAmount :: STRING
        ) AS fee_token_amount,
        TRY_TO_NUMBER(
            decoded_log :message :gasLimit :: STRING
        ) AS gas_limit,
        TRY_TO_NUMBER(
            decoded_log :message :nonce :: STRING
        ) AS nonce,
        decoded_log :message :receiver :: STRING AS receiver,
        decoded_log :message :sender :: STRING AS sender,
        TRY_TO_NUMBER(
            decoded_log :message :sequenceNumber :: STRING
        ) AS sequence_number,
        TRY_TO_NUMBER(
            decoded_log :message :sourceChainSelector :: STRING
        ) AS source_chain_selector,
        destChainSelector AS dest_chain_selector,
        chain_name,
        -- array of {token, amount} structs; flattened in the final select
        decoded_log :message :tokenAmounts AS token_amounts,
        ARRAY_SIZE(
            decoded_log :message :tokenAmounts
        ) AS token_amounts_count,
        CONCAT(
            l.tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        l.modified_timestamp
    FROM
        {{ ref('core__ez_decoded_event_logs') }}
        l
        INNER JOIN on_ramp_set
        ON onRampAddress = contract_address
    WHERE
        topic_0 = '0xd0c3c799bf9e2639de44391e7f524d229b2b55f5b1ea94b2bf7da42f7243dddd' -- CCIPSendRequested
        AND tx_succeeded
        AND event_removed = FALSE

{% if is_incremental() %}
        AND l.modified_timestamp >= (
            SELECT
                MAX(modified_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND l.modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
-- one output row per (message, token) pair; messages with an empty
-- tokenAmounts array are dropped by the WHERE below
SELECT
    C.block_number,
    C.block_timestamp,
    C.origin_function_signature,
    C.origin_from_address,
    C.origin_to_address,
    C.tx_hash,
    C.event_name,
    C.event_index,
    'chainlink-ccip' AS platform,
    'v1' AS version,
    C.contract_address AS bridge_address,
    C.message_id,
    C.nonce,
    C.receiver,
    C.sender,
    C.receiver AS destination_chain_receiver,
    C.sequence_number,
    C.source_chain_selector,
    C.dest_chain_selector AS destination_chain_id,
    C.chain_name AS destination_chain,
    C.gas_limit,
    C.fee_token,
    -- Divide the fee evenly by the number of tokens in the array
    C.fee_token_amount / C.token_amounts_count AS fee_token_amount_per_token,
    C.token_amounts_count,
    TRY_TO_NUMBER(
        tokens.value :amount :: STRING
    ) AS amount_unadj,
    tokens.value :token :: STRING AS token_address,
    C._log_id,
    C.modified_timestamp
FROM
    ccip_sent C,
    LATERAL FLATTEN(
        input => C.token_amounts
    ) AS tokens
WHERE
    token_amounts_count > 0

View File

@ -1,70 +0,0 @@
# dbt schema tests for silver_bridge__ccip_send_requested.
# Uniqueness on _LOG_ID; RECEIVER carries no hex regex because CCIP receivers
# may target non-EVM chains.
version: 2
models:
  - name: silver_bridge__ccip_send_requested
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EVENT_INDEX
        tests:
          - not_null
      - name: EVENT_NAME
        tests:
          - not_null
      - name: BRIDGE_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SENDER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: RECEIVER
        tests:
          - not_null
      - name: DESTINATION_CHAIN_RECEIVER
        tests:
          - not_null
      - name: AMOUNT_UNADJ
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - DECIMAL
                - FLOAT
                - NUMBER
      - name: TOKEN_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: MODIFIED_TIMESTAMP
        tests:
          - not_null

View File

@ -1,101 +0,0 @@
-- Celer cBridge "Send" events on BSC, normalized into the silver bridge shape.
-- One decoded event row in, one bridge-transfer row out; destination receiver
-- is the event's receiver field.
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_bridge','defi','bridge','curated']
) }}

WITH send_events AS (
    -- decoded Send events from the cBridge pool contracts
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        'celer_cbridge' AS NAME,
        event_index,
        topics [0] :: STRING AS topic_0,
        event_name,
        TRY_TO_NUMBER(decoded_log :"amount" :: STRING) AS amount,
        TRY_TO_NUMBER(decoded_log :"dstChainId" :: STRING) AS dstChainId,
        TRY_TO_NUMBER(decoded_log :"maxSlippage" :: STRING) AS maxSlippage,
        TRY_TO_NUMBER(decoded_log :"nonce" :: STRING) AS nonce,
        decoded_log :"receiver" :: STRING AS receiver,
        decoded_log :"sender" :: STRING AS sender,
        decoded_log :"token" :: STRING AS token,
        decoded_log :"transferId" :: STRING AS transferId,
        decoded_log,
        event_removed,
        IFF(tx_succeeded, 'SUCCESS', 'FAIL') AS tx_status,
        CONCAT(tx_hash :: STRING, '-', event_index :: STRING) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__ez_decoded_event_logs') }}
    WHERE
        -- Send event signature
        topics [0] :: STRING = '0x89d8051e597ab4178a863a5190407b98abfeff406aa8db90c59af76612e58f01'
        AND contract_address IN (
            '0x5d96d4287d1ff115ee50fac0526cf43ecf79bfc6',
            '0x9b36f165bab9ebe611d491180418d8de4b8f3a1f',
            '0x265b25e22bcd7f10a5bd6e6410f10537cc7567e8',
            '0xdd90e5e87a2081dcf0391920868ebc2ffb81a1af'
        )
        AND tx_succeeded

{% if is_incremental() %}
        -- 12h lookback over the high-water mark, capped at 7 days
        AND _inserted_timestamp >= (
            SELECT
                MAX(_inserted_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    tx_hash,
    event_index,
    topic_0,
    event_name,
    event_removed,
    tx_status,
    contract_address AS bridge_address,
    NAME AS platform,
    sender,
    receiver,
    receiver AS destination_chain_receiver,
    amount,
    dstChainId AS destination_chain_id,
    maxSlippage AS max_slippage,
    nonce,
    token AS token_address,
    transferId AS transfer_id,
    _log_id,
    _inserted_timestamp
FROM
    send_events

View File

@ -1,74 +0,0 @@
# dbt schema tests for silver_bridge__celer_cbridge_send.
# Uniqueness on _LOG_ID, standard not-null / hex checks, plus a freshness
# test on _INSERTED_TIMESTAMP (data no older than 3 days).
version: 2
models:
  - name: silver_bridge__celer_cbridge_send
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EVENT_INDEX
        tests:
          - not_null
      - name: EVENT_NAME
        tests:
          - not_null
      - name: BRIDGE_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SENDER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: RECEIVER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: DESTINATION_CHAIN_RECEIVER
        tests:
          - not_null
      - name: AMOUNT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - DECIMAL
                - FLOAT
                - NUMBER
      - name: TOKEN_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: _INSERTED_TIMESTAMP
        tests:
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 3

View File

@ -1,129 +0,0 @@
-- deBridge DLN "CreatedOrder" bridge sends on BSC.
-- Decodes order events from the DLN Source contract; token and destination
-- receiver are extracted from the raw calldata words because the decoded
-- fields are chain-format dependent (Solana uses base58).
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_bridge','defi','bridge','curated']
) }}

WITH base_evt AS (
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        'dln_debridge' AS NAME,
        event_index,
        topics [0] :: STRING AS topic_0,
        event_name,
        DATA,
        -- split raw calldata (sans 0x) into 32-byte words
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        -- give-side token address: word 24, first 20 bytes
        CONCAT('0x', SUBSTR(segmented_data [24] :: STRING, 1, 40)) AS token_address,
        decoded_log :"affiliateFee" :: STRING AS affiliateFee,
        decoded_log :"metadata" :: STRING AS metadata,
        TRY_TO_NUMBER(
            decoded_log :"nativeFixFee" :: STRING
        ) AS nativeFixFee,
        decoded_log :"order" AS order_obj,
        decoded_log :"order" :"allowedCancelBeneficiarySrc" :: STRING AS allowedCancelBeneficiarySrc,
        decoded_log :"order" :"allowedTakerDst" :: STRING AS allowedTakerDst,
        decoded_log :"order" :"externalCall" :: STRING AS externalCall,
        TRY_TO_NUMBER(
            decoded_log :"order" :"giveAmount" :: STRING
        ) AS giveAmount,
        TRY_TO_NUMBER(
            decoded_log :"order" :"giveChainId" :: STRING
        ) AS giveChainId,
        decoded_log :"order" :"givePatchAuthoritySrc" :: STRING AS givePatchAuthoritySrc,
        decoded_log :"order" :"giveTokenAddress" :: STRING AS giveTokenAddress,
        TRY_TO_NUMBER(
            decoded_log :"order" :"makerOrderNonce" :: STRING
        ) AS makerOrderNonce,
        decoded_log :"order" :"makerSrc" :: STRING AS makerSrc,
        decoded_log :"order" :"orderAuthorityAddressDst" :: STRING AS orderAuthorityAddressDst,
        -- destination receiver: word 28, first 20 bytes (EVM interpretation)
        CONCAT('0x', LEFT(segmented_data [28] :: STRING, 40)) AS receiverDst,
        TRY_TO_NUMBER(
            decoded_log :"order" :"takeAmount" :: STRING
        ) AS takeAmount,
        TRY_TO_NUMBER(
            decoded_log :"order" :"takeChainId" :: STRING
        ) AS takeChainId,
        decoded_log :"order" :"takeTokenAddress" :: STRING AS takeTokenAddress,
        decoded_log :"orderId" :: STRING AS orderId,
        TRY_TO_NUMBER(
            decoded_log :"percentFee" :: STRING
        ) AS percentFee,
        TRY_TO_NUMBER(
            decoded_log :"referralCode" :: STRING
        ) AS referralCode,
        decoded_log,
        event_removed,
        IFF(
            tx_succeeded,
            'SUCCESS',
            'FAIL'
        ) AS tx_status,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__ez_decoded_event_logs') }}
    WHERE
        topics [0] :: STRING = '0xfc8703fd57380f9dd234a89dce51333782d49c5902f307b02f03e014d18fe471' --CreatedOrder
        AND contract_address = '0xef4fb24ad0916217251f553c0596f8edc630eb66' --Dln: Source
        AND tx_succeeded

{% if is_incremental() %}
        -- 12h lookback over the high-water mark, capped at 7 days
        AND _inserted_timestamp >= (
            SELECT
                MAX(_inserted_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    tx_hash,
    event_index,
    topic_0,
    event_name,
    event_removed,
    tx_status,
    contract_address AS bridge_address,
    NAME AS platform,
    origin_from_address AS sender,
    receiverDst AS receiver,
    -- 7565164 is the DLN chain id used for Solana here: re-encode the full
    -- 32-byte receiver word as base58 instead of a truncated EVM address
    CASE
        WHEN takeChainId :: STRING = '7565164' THEN utils.udf_hex_to_base58(CONCAT('0x', segmented_data [28] :: STRING))
        ELSE receiverDst
    END AS destination_chain_receiver,
    giveAmount AS amount,
    takeChainId AS destination_chain_id,
    -- only the Solana id is mapped to a name; other chain ids resolve downstream
    CASE
        WHEN destination_chain_id :: STRING = '7565164' THEN 'solana'
        ELSE NULL
    END AS destination_chain,
    -- zero address means native BNB; substitute the WBNB contract address
    CASE
        WHEN token_address = '0x0000000000000000000000000000000000000000' THEN LOWER('0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c')
        ELSE token_address
    END AS token_address,
    decoded_log,
    order_obj,
    _log_id,
    _inserted_timestamp
FROM
    base_evt

View File

@ -1,74 +0,0 @@
# dbt schema tests for silver_bridge__dln_debridge_createdorder.
# Uniqueness on _LOG_ID, not-null / hex checks, and 3-day freshness on
# _INSERTED_TIMESTAMP.
version: 2
models:
  - name: silver_bridge__dln_debridge_createdorder
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EVENT_INDEX
        tests:
          - not_null
      - name: EVENT_NAME
        tests:
          - not_null
      - name: BRIDGE_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SENDER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: RECEIVER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      # no hex regex: Solana-bound receivers are base58 strings
      - name: DESTINATION_CHAIN_RECEIVER
        tests:
          - not_null
      - name: AMOUNT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - DECIMAL
                - FLOAT
                - NUMBER
      - name: TOKEN_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: _INSERTED_TIMESTAMP
        tests:
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 3

View File

@ -1,149 +0,0 @@
-- EYWA bridge sends on BSC.
-- Pulls RequestSent (routing info) and Locked (token/amount info) events in
-- one pass, splits them, and joins them per (block_number, tx_hash).
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_bridge','defi','bridge','curated']
) }}

WITH base_evt AS (
    -- both event types fetched together so the incremental filter is applied once
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        'eywa' AS NAME,
        event_index,
        topics [0] :: STRING AS topic_0,
        event_name,
        decoded_log,
        event_removed,
        IFF(
            tx_succeeded,
            'SUCCESS',
            'FAIL'
        ) AS tx_status,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__ez_decoded_event_logs') }}
    WHERE
        topics [0] :: STRING IN (
            '0x5566d73d091d945ab32ea023cd1930c0d43aa43bef9aee4cb029775cfc94bdae',
            --RequestSent
            '0xb5f411fa3c897c9b0b6cd61852278a67e73d885610724a5610a8580d3e94cfdb'
        ) --locked
        AND contract_address IN (
            '0xece9cf6a8f2768a3b8b65060925b646afeaa5167',
            --BridgeV2
            '0xac8f44ceca92b2a4b30360e5bd3043850a0ffcbe',
            --PortalV2
            '0xbf0b5d561b986809924f88099c4ff0e6bcce60c9' --PortalV2
        )
        AND tx_succeeded

{% if is_incremental() %}
        -- 12h lookback over the high-water mark, capped at 7 days
        AND _inserted_timestamp >= (
            SELECT
                MAX(_inserted_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
requestsent AS (
    -- routing leg: destination chain id and request metadata
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        NAME,
        event_index,
        topic_0,
        event_name,
        decoded_log :"chainIdTo" :: STRING AS chainIdTo,
        decoded_log :"data" :: STRING AS data_requestsent,
        decoded_log :"requestId" :: STRING AS requestId,
        decoded_log :"to" :: STRING AS to_address,
        decoded_log,
        event_removed,
        tx_status,
        _log_id,
        _inserted_timestamp
    FROM
        base_evt
    WHERE
        topic_0 = '0x5566d73d091d945ab32ea023cd1930c0d43aa43bef9aee4cb029775cfc94bdae' --RequestSent
),
locked AS (
    -- asset leg: token, amount, and the locking account
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        contract_address,
        NAME,
        event_index,
        topic_0,
        event_name,
        TRY_TO_NUMBER(
            decoded_log :"amount" :: STRING
        ) AS amount,
        decoded_log :"from" :: STRING AS from_address,
        decoded_log :"to" :: STRING AS to_address,
        decoded_log :"token" :: STRING AS token,
        decoded_log,
        event_removed,
        tx_status,
        _log_id,
        _inserted_timestamp
    FROM
        base_evt
    WHERE
        topic_0 = '0xb5f411fa3c897c9b0b6cd61852278a67e73d885610724a5610a8580d3e94cfdb' --Locked
)
-- NOTE(review): _log_id and _inserted_timestamp are selected unqualified while
-- both join sides expose them (USING only merges block_number/tx_hash) —
-- consider qualifying with r. to make the source explicit.
SELECT
    r.block_number,
    r.block_timestamp,
    r.origin_function_signature,
    r.origin_from_address,
    r.origin_to_address,
    r.tx_hash,
    r.event_index,
    r.topic_0,
    r.event_name,
    r.event_removed,
    r.tx_status,
    r.contract_address AS bridge_address,
    r.name AS platform,
    l.from_address AS sender,
    -- lateral aliases: receiver mirrors sender, and the destination receiver
    -- mirrors receiver (same address on both chains)
    sender AS receiver,
    receiver AS destination_chain_receiver,
    l.amount,
    r.chainIdTo AS destination_chain_id,
    l.token AS token_address,
    _log_id,
    _inserted_timestamp
FROM
    requestsent r
    LEFT JOIN locked l USING(
        block_number,
        tx_hash
    )
WHERE
    -- drop RequestSent rows with no matching Locked event
    token_address IS NOT NULL

View File

@ -1,74 +0,0 @@
# dbt schema tests for silver_bridge__eywa_requestsent.
# Uniqueness on _LOG_ID, not-null / hex checks, and 3-day freshness on
# _INSERTED_TIMESTAMP.
version: 2
models:
  - name: silver_bridge__eywa_requestsent
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EVENT_INDEX
        tests:
          - not_null
      - name: EVENT_NAME
        tests:
          - not_null
      - name: BRIDGE_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SENDER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: RECEIVER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: DESTINATION_CHAIN_RECEIVER
        tests:
          - not_null
      - name: AMOUNT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - DECIMAL
                - FLOAT
                - NUMBER
      - name: TOKEN_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: _INSERTED_TIMESTAMP
        tests:
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 3

View File

@ -1,188 +0,0 @@
-- Meson bridge transfers on BSC.
-- Meson emits no decodable send event here, so deposits are reconstructed from
-- ERC-20 and native transfers into the Meson contract, then joined to raw logs
-- that encode the destination chain id in topic 1.
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_bridge','defi','bridge','curated']
) }}

WITH token_transfers AS (
    -- ERC-20 deposits into the Meson contract
    SELECT
        block_number,
        block_timestamp,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        tx_hash,
        event_index,
        contract_address,
        from_address,
        to_address,
        raw_amount,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__ez_token_transfers') }}
    WHERE
        from_address <> '0x0000000000000000000000000000000000000000'
        AND to_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'

{% if is_incremental() %}
        -- 12h lookback over the high-water mark, capped at 7 days
        AND _inserted_timestamp >= (
            SELECT
                MAX(_inserted_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
        AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
native_transfers AS (
    -- native BNB deposits into the Meson contract; tx table supplies origin fields
    -- NOTE(review): unlike token_transfers, this branch has no 7-day floor on
    -- its incremental filter — confirm whether that asymmetry is intentional.
    SELECT
        et.block_number,
        et.block_timestamp,
        et.tx_hash,
        tx.from_address AS origin_from_address,
        tx.to_address AS origin_to_address,
        tx.origin_function_signature,
        et.from_address,
        et.to_address,
        amount_precise_raw,
        et.ez_native_transfers_id AS _call_id,
        et.modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__ez_native_transfers') }}
        et
        INNER JOIN {{ ref('core__fact_transactions') }}
        tx
        ON et.block_number = tx.block_number
        AND et.tx_hash = tx.tx_hash
    WHERE
        et.to_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'

{% if is_incremental() %}
        AND _inserted_timestamp >= (
            SELECT
                MAX(_inserted_timestamp) - INTERVAL '12 hours'
            FROM
                {{ this }}
        )
{% endif %}
),
all_transfers AS (
    -- unify both deposit types into one transfer shape; native rows carry
    -- NULL event metadata and WBNB as the token address
    SELECT
        block_number,
        block_timestamp,
        origin_from_address,
        origin_to_address,
        origin_function_signature,
        tx_hash,
        event_index,
        'Transfer' AS event_name,
        to_address AS bridge_address,
        from_address AS sender,
        to_address AS receiver,
        raw_amount AS amount_unadj,
        contract_address AS token_address,
        {{ dbt_utils.generate_surrogate_key(
            ['_log_id']
        ) }} AS _id,
        _inserted_timestamp
    FROM
        token_transfers
    UNION ALL
    SELECT
        block_number,
        block_timestamp,
        origin_from_address,
        origin_to_address,
        origin_function_signature,
        tx_hash,
        NULL AS event_index,
        NULL AS event_name,
        to_address AS bridge_address,
        from_address AS sender,
        to_address AS receiver,
        amount_precise_raw AS amount_unadj,
        LOWER('0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c') AS token_address,
        {{ dbt_utils.generate_surrogate_key(
            ['_call_id']
        ) }} AS _id,
        _inserted_timestamp
    FROM
        native_transfers
),
dst_info AS (
    -- raw Meson logs whose topic 1 packs routing data; the destination chain
    -- id is 4 hex chars inside the last 12 characters of that word
    SELECT
        block_number,
        tx_hash,
        topics [1] :: STRING AS encoded_data,
        SUBSTR(RIGHT(encoded_data, 12), 1, 4) AS destination_chain_id,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
    WHERE
        contract_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
        AND topic_0 IN (
            '0x5ce4019f772fda6cb703b26bce3ec3006eb36b73f1d3a0eb441213317d9f5e9d',
            '0x8d92c805c252261fcfff21ee60740eb8a38922469a7e6ee396976d57c22fc1c9'
        )
        AND tx_succeeded

{% if is_incremental() %}
        -- NOTE(review): 16h lookback here vs 12h in the other CTEs — confirm intent
        AND _inserted_timestamp >= (
            SELECT
                MAX(_inserted_timestamp) - INTERVAL '16 hours'
            FROM
                {{ this }}
        )
{% endif %}
)
SELECT
    t.block_number,
    block_timestamp,
    origin_from_address,
    origin_to_address,
    origin_function_signature,
    t.tx_hash,
    event_index,
    event_name,
    'meson' AS platform,
    bridge_address,
    sender,
    receiver,
    -- zero-address origin (presumably system txs) falls back to the sender
    CASE
        WHEN origin_from_address = '0x0000000000000000000000000000000000000000' THEN sender
        ELSE origin_from_address
    END AS destination_chain_receiver,
    amount_unadj,
    destination_chain_id,
    -- seed maps Meson's short chain codes to names; unknown codes → 'other'
    COALESCE(LOWER(chain), 'other') AS destination_chain,
    token_address,
    _id,
    t._inserted_timestamp
FROM
    all_transfers t
    INNER JOIN dst_info d
    ON t.tx_hash = d.tx_hash
    AND t.block_number = d.block_number
    LEFT JOIN {{ ref('silver_bridge__meson_chain_id_seed') }}
    s
    ON d.destination_chain_id :: STRING = RIGHT(
        s.short_coin_type,
        4
    ) :: STRING
WHERE
    -- dedupe: the transfer↔log join is per-tx and can fan out
    origin_to_address IS NOT NULL qualify (ROW_NUMBER() over (PARTITION BY _id
ORDER BY
    t._inserted_timestamp DESC)) = 1

View File

@ -1,52 +0,0 @@
# dbt schema tests for silver_bridge__meson_transfers.
# Uniqueness on the surrogate _ID (covers both ERC-20 and native legs) plus
# type, not-null, hex, and freshness checks.
version: 2
models:
  - name: silver_bridge__meson_transfers
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOKEN_ADDRESS
        tests:
          - not_null
      - name: SENDER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: DESTINATION_CHAIN_RECEIVER
        tests:
          - not_null
      - name: BRIDGE_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: AMOUNT_UNADJ
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - DECIMAL
                - FLOAT
                - NUMBER
      - name: _INSERTED_TIMESTAMP
        tests:
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 3

View File

@ -1,81 +0,0 @@
# dbt schema tests for silver_bridge__complete_bridge_activity, the
# consolidated model that unions the per-platform bridge models.
# EVENT_INDEX / EVENT_NAME allow nulls for native-transfer-based platforms
# (v1-native, wormhole, meson) via scoped not_null `where` clauses.
version: 2
models:
  - name: silver_bridge__complete_bridge_activity
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_HASH
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: EVENT_INDEX
        tests:
          - not_null:
              where: VERSION <> 'v1-native' AND PLATFORM NOT IN ('wormhole','meson')
      - name: EVENT_NAME
        tests:
          - not_null:
              where: VERSION <> 'v1-native' AND PLATFORM NOT IN ('wormhole','meson')
      - name: BRIDGE_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: DESTINATION_CHAIN
      - name: DESTINATION_CHAIN_RECEIVER
        tests:
          - not_null
      # FIX: "- not_null" under PLATFORM and VERSION was previously a sibling
      # entry in the columns list (a bare string item), not a test — so these
      # two columns were never actually tested. Nest it under `tests:`.
      - name: PLATFORM
        tests:
          - not_null
      - name: VERSION
        tests:
          - not_null
      - name: SENDER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: RECEIVER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: AMOUNT_UNADJ
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - DECIMAL
                - FLOAT
                - NUMBER
      - name: TOKEN_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: _INSERTED_TIMESTAMP
        tests:
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 3

View File

@ -1,134 +0,0 @@
-- Stargate pool discovery on BSC: finds pool contracts deployed (TYPE create*)
-- by the Stargate deployer EOA, then reads each pool's underlying token address
-- on-chain via eth_call of token() (selector 0xfc0c546a) at the creation block.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
tags = ['silver_bridge','defi','bridge','curated']
) }}
-- Contract-creation traces originating from the Stargate deployer address.
WITH base_contracts AS (
SELECT
tx_hash,
block_number,
block_timestamp,
from_address,
to_address AS contract_address,
concat_ws(
'-',
block_number,
tx_position,
CONCAT(
TYPE,
'_',
trace_address
)
) AS _call_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_traces') }}
WHERE
from_address = LOWER('0xe7Ec689f432f29383f217e36e680B5C855051f25')
AND TYPE ILIKE 'create%'
AND tx_succeeded
AND trace_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
-- Skip pools already resolved in a previous run; only read new contracts.
AND to_address NOT IN (
SELECT
DISTINCT pool_address
FROM
{{ this }}
)
{% endif %}
),
function_sigs AS (
SELECT
'0xfc0c546a' AS function_sig,
'token' AS function_name
),
inputs AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
0 AS function_input,
-- Calldata = 4-byte selector + the argument zero-padded to 32 bytes (64 hex chars).
CONCAT(
function_sig,
LPAD(
function_input,
64,
0
)
) AS DATA
FROM
base_contracts
JOIN function_sigs
ON 1 = 1
),
-- Issue the eth_call batch through the live/utils UDFs against the QuickNode endpoint.
contract_reads AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
function_input,
DATA,
utils.udf_json_rpc_call(
'eth_call',
[{ 'to': contract_address, 'from': null, 'data': data }, utils.udf_int_to_hex(block_number) ]
) AS rpc_request,
live.udf_api(
'POST',
CONCAT(
'{service}',
'/',
'{Authentication}'
),{},
rpc_request,
'Vault/prod/bsc/quicknode/mainnet'
) AS read_output,
SYSDATE() AS _inserted_timestamp
FROM
inputs
),
reads_flat AS (
SELECT
read_output,
read_output :data :id :: STRING AS read_id,
read_output :data :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
function_sig,
function_name,
function_input,
DATA,
contract_address,
block_number,
_inserted_timestamp
FROM
contract_reads
)
SELECT
read_output,
read_id,
read_result,
read_id_object,
function_sig,
function_name,
function_input,
DATA,
block_number,
contract_address AS pool_address,
-- token() returns one 32-byte word ('0x' + 64 hex chars); the address is the
-- low 20 bytes, i.e. the last 40 hex chars starting at offset 27.
CONCAT('0x', SUBSTR(read_result, 27, 40)) AS token_address,
_inserted_timestamp
FROM
reads_flat

View File

@ -1,16 +0,0 @@
version: 2
models:
- name: silver_bridge__stargate_createpool
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,122 +0,0 @@
-- Stargate (LayerZero v1) 'Swap' events on BSC: one row per outbound bridge
-- swap from a known Stargate pool, with fees and the destination chain name
-- resolved via the Stargate chain-id seed.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
-- Known Stargate pools and their underlying tokens (from the createpool model).
WITH pools AS (
SELECT
pool_address,
LOWER(token_address) AS token_address
FROM
{{ ref('silver_bridge__stargate_createpool') }}
),
base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'stargate' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amountSD" :: STRING
) AS amountSD,
TRY_TO_NUMBER(
decoded_log :"chainId" :: STRING
) AS chainId,
-- Stargate v1 endpoint ids are >= 100; legacy ids below 100 are offset by 100
-- so they join the seed table consistently.
CASE
WHEN chainId < 100 THEN chainId + 100
ELSE chainId
END AS destination_chain_id,
TRY_TO_NUMBER(
decoded_log :"dstPoolId" :: STRING
) AS dstPoolId,
TRY_TO_NUMBER(
decoded_log :"eqFee" :: STRING
) AS eqFee,
TRY_TO_NUMBER(
decoded_log :"eqReward" :: STRING
) AS eqReward,
-- BUG FIX: lpFee and protocolFee were previously re-parsed from "amountSD"
-- (copy-paste error); read the dedicated Swap event fields instead.
TRY_TO_NUMBER(
decoded_log :"lpFee" :: STRING
) AS lpFee,
TRY_TO_NUMBER(
decoded_log :"protocolFee" :: STRING
) AS protocolFee,
decoded_log :"from" :: STRING AS from_address,
decoded_log,
token_address,
event_removed,
IFF(
tx_succeeded,
'SUCCESS',
'FAIL'
) AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
d
INNER JOIN pools p
ON d.contract_address = p.pool_address
WHERE
-- Swap event topic.
topics [0] :: STRING = '0x34660fc8af304464529f48a778e03d03e4d34bcd5f9b6f0cfbf3cd238c642f7f'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
LOWER('0x6694340fc020c5E6B96567843da2df01b2CE1eb6') AS bridge_address,
NAME AS platform,
from_address AS sender,
-- Stargate's Swap event only carries the source-side 'from'; receiver and the
-- destination receiver are approximated by the sender address.
from_address AS receiver,
receiver AS destination_chain_receiver,
amountSD AS amount_unadj,
destination_chain_id,
LOWER(chain_name) AS destination_chain,
dstPoolId AS destination_pool_id,
eqFee AS fee,
eqReward AS reward,
lpFee AS lp_fee,
protocolFee AS protocol_fee,
token_address,
_log_id,
_inserted_timestamp
FROM
base_evt b
LEFT JOIN {{ ref('silver_bridge__stargate_chain_id_seed') }}
s
ON b.destination_chain_id :: STRING = s.chain_id :: STRING

View File

@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__stargate_swap
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3

View File

@ -1,93 +0,0 @@
-- Symbiosis 'SynthesizeRequest' events on BSC: outbound bridge requests emitted
-- by the two Symbiosis portal contracts, decoded into sender/receiver/amount
-- plus the destination chain id.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'symbiosis' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_log :"chainID" :: STRING
) AS chainID,
decoded_log :"from" :: STRING AS from_address,
decoded_log :"id" :: STRING AS id,
-- Address allowed to revert the request if synthesis fails on the destination.
decoded_log :"revertableAddress" :: STRING AS revertableAddress,
decoded_log :"to" :: STRING AS to_address,
decoded_log :"token" :: STRING AS token,
decoded_log,
event_removed,
IFF(
tx_succeeded,
'SUCCESS',
'FAIL'
) AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
-- SynthesizeRequest event topic.
topics [0] :: STRING = '0x31325fe0a1a2e6a5b1e41572156ba5b4e94f0fae7e7f63ec21e9b5ce1e4b3eab'
AND contract_address IN (
'0xb91d3060c90aac7c4c706aef2b37997b3b2a1dcf',
'0x5aa5f7f84ed0e5db0a4a85c3947ea16b53352fd4'
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
from_address AS sender,
to_address AS receiver,
-- The event's 'to' is the receiver on the destination chain as well.
receiver AS destination_chain_receiver,
amount,
chainID AS destination_chain_id,
id,
revertableAddress AS revertable_address,
token AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt

View File

@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__symbiosis_synthesizerequest
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3

View File

@ -1,93 +0,0 @@
-- Synapse bridge TokenRedeem / TokenDeposit events on BSC (plain bridge
-- transfers with no destination-side swap leg).
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'synapse' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_log :"chainId" :: STRING
) AS chainId,
decoded_log :"to" :: STRING AS to_address,
decoded_log :"token" :: STRING AS token,
decoded_log,
event_removed,
IFF(
tx_succeeded,
'SUCCESS',
'FAIL'
) AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING IN (
'0xdc5bad4651c5fbe9977a696aadc65996c468cde1448dd468ec0d83bf61c4b57c',
--redeem
'0xda5273705dbef4bf1b902a131c2eac086b7e1476a8ab0cb4da08af1fe1bd8e3b' --deposit
)
AND contract_address IN (
'0x0efc29e196da2e81afe96edd041bedcdf9e74893',
'0xd123f70ae324d34a9e76b67a27bf77593ba8749f'
)
AND origin_to_address IS NOT NULL
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
amount,
-- The event does not carry a source-side sender; use the tx originator.
origin_from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
chainId AS destination_chain_id,
token AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt

View File

@ -1,72 +0,0 @@
version: 2
models:
- name: silver_bridge__synapse_token_bridge
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- not_null

View File

@ -1,104 +0,0 @@
-- Synapse bridge-and-swap events on BSC: bridge transfers that include a
-- destination-side swap (tokenIndexFrom/tokenIndexTo, minDy, deadline).
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'synapse' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_log :"chainId" :: STRING
) AS chainId,
-- NOTE(review): 'deadline' is parsed as a timestamp; presumably a unix
-- epoch in the raw event — confirm TRY_TO_TIMESTAMP handles the format.
TRY_TO_TIMESTAMP(
decoded_log :"deadline" :: STRING
) AS deadline,
-- Minimum amount out accepted by the destination-side swap.
TRY_TO_NUMBER(
decoded_log :"minDy" :: STRING
) AS minDy,
decoded_log :"to" :: STRING AS to_address,
decoded_log :"token" :: STRING AS token,
TRY_TO_NUMBER(
decoded_log :"tokenIndexFrom" :: STRING
) AS tokenIndexFrom,
TRY_TO_NUMBER(
decoded_log :"tokenIndexTo" :: STRING
) AS tokenIndexTo,
decoded_log,
event_removed,
IFF(
tx_succeeded,
'SUCCESS',
'FAIL'
) AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x91f25e9be0134ec851830e0e76dc71e06f9dade75a9b84e9524071dbbc319425'
AND contract_address IN (
'0x0efc29e196da2e81afe96edd041bedcdf9e74893',
'0xd123f70ae324d34a9e76b67a27bf77593ba8749f'
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
origin_from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
amount,
chainId AS destination_chain_id,
token AS token_address,
deadline,
minDy AS min_dy,
tokenIndexFrom AS token_index_from,
tokenIndexTo AS token_index_to,
_log_id,
_inserted_timestamp
FROM
base_evt

View File

@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__synapse_tokenbridgeandswap
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3

View File

@ -1,284 +0,0 @@
-- Wormhole token-bridge outbound transfers on BSC: combines ERC-20 transfers
-- (transferTokens) and native BNB transfers (wrapAndTransferETH) into the
-- Wormhole bridge contract, decodes the destination chain id and recipient
-- from calldata, and renders the recipient in the destination chain's native
-- address format (base58, bech32, algorand, etc.).
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
-- ERC-20 transfers into the bridge via transferTokens.
WITH token_transfers AS (
SELECT
tr.block_number,
tr.block_timestamp,
tr.origin_function_signature,
tr.origin_from_address,
tr.origin_to_address,
tr.tx_hash,
event_index,
tr.contract_address,
tr.from_address,
tr.to_address,
raw_amount,
-- Split calldata (after the 4-byte selector) into 32-byte words.
regexp_substr_all(SUBSTR(input_data, 11, len(input_data)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS destination_chain_id,
CONCAT(
'0x',
segmented_data [3] :: STRING
) AS recipient1,
CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS recipient2,
-- Count leading zero padding: >= 24 zero chars means a 20-byte (EVM-style)
-- address, otherwise keep the full 32-byte recipient word.
LENGTH(
REGEXP_SUBSTR(
segmented_data [3] :: STRING,
'^(0*)'
)
) AS len,
CASE
WHEN len >= 24 THEN recipient2
ELSE recipient1
END AS destination_recipient_address,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS token,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [1] :: STRING)) AS amount,
utils.udf_hex_to_int(
segmented_data [4] :: STRING
) AS arbiterFee,
utils.udf_hex_to_int(
segmented_data [5] :: STRING
) AS nonce,
CONCAT(tr.tx_hash :: STRING, '-', tr.event_index :: STRING) AS _log_id,
tr.modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_token_transfers') }}
tr
INNER JOIN {{ ref('core__fact_transactions') }}
tx
ON tr.block_number = tx.block_number
AND tr.tx_hash = tx.tx_hash
WHERE
tr.from_address <> '0x0000000000000000000000000000000000000000'
AND tr.to_address = LOWER('0xB6F6D86a8f9879A9c87f643768d9efc38c1Da6E7')
AND tr.origin_function_signature = '0x0f5287b0' -- transferTokens
AND destination_chain_id <> 0
{% if is_incremental() %}
AND tr.modified_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
-- Native BNB transfers into the bridge via wrapAndTransferETH.
native_transfers AS (
SELECT
et.block_number,
et.block_timestamp,
et.tx_hash,
tx.from_address AS origin_from_address,
tx.to_address AS origin_to_address,
tx.origin_function_signature,
et.from_address,
et.to_address,
amount_precise_raw,
regexp_substr_all(SUBSTR(input_data, 11, len(input_data)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS destination_chain_id,
CONCAT(
'0x',
segmented_data [1] :: STRING
) AS recipient1,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS recipient2,
LENGTH(
REGEXP_SUBSTR(
segmented_data [1] :: STRING,
'^(0*)'
)
) AS len,
CASE
WHEN len >= 24 THEN recipient2
ELSE recipient1
END AS destination_recipient_address,
utils.udf_hex_to_int(
segmented_data [2] :: STRING
) AS arbiterFee,
utils.udf_hex_to_int(
segmented_data [3] :: STRING
) AS nonce,
et.ez_native_transfers_id AS _call_id,
et.modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_native_transfers') }}
et
INNER JOIN {{ ref('core__fact_transactions') }}
tx
ON et.block_number = tx.block_number
AND et.tx_hash = tx.tx_hash
WHERE
et.to_address = LOWER('0xB6F6D86a8f9879A9c87f643768d9efc38c1Da6E7')
AND tx.origin_function_signature = '0x9981509f' -- wrapAndTransferETH
AND destination_chain_id <> 0
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
-- Union both transfer types into one shape; native legs use WBNB as token.
all_transfers AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
'Transfer' AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
raw_amount AS amount_unadj,
destination_chain_id,
contract_address AS token_address,
destination_recipient_address,
{{ dbt_utils.generate_surrogate_key(
['_log_id']
) }} AS _id,
_inserted_timestamp
FROM
token_transfers
UNION ALL
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
NULL AS event_index,
NULL AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
amount_precise_raw AS amount_unadj,
destination_chain_id,
LOWER('0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c') AS token_address,
destination_recipient_address,
{{ dbt_utils.generate_surrogate_key(
['_call_id']
) }} AS _id,
_inserted_timestamp
FROM
native_transfers
),
-- Lookup table mapping hex-encoded NEAR recipients back to NEAR account names.
base_near AS (
SELECT
near_address,
addr_encoded
FROM
{{ source(
'crosschain_silver',
'near_address_encoded'
) }}
)
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
event_name,
'wormhole' AS platform,
bridge_address,
sender,
receiver,
amount_unadj,
destination_chain_id,
chain_name AS destination_chain,
token_address,
destination_recipient_address,
--hex address on the destination chain, requires decoding for non-EVM - more info: https://docs.wormhole.com/wormhole/blockchain-environments/environments
-- The bech32 branches pass the chain-name prefix ('inj'/'sei', 'osmo'/'xpla',
-- 'terra'/'evmos', 'cosmos'/'kujira') derived by truncating the chain name.
CASE
WHEN destination_chain = 'solana' THEN utils.udf_hex_to_base58(destination_recipient_address)
WHEN destination_chain IN (
'injective',
'sei'
) THEN utils.udf_hex_to_bech32(
destination_recipient_address,
SUBSTR(
destination_chain,
1,
3
)
)
WHEN destination_chain IN (
'osmosis',
'xpla'
) THEN utils.udf_hex_to_bech32(
destination_recipient_address,
SUBSTR(
destination_chain,
1,
4
)
)
WHEN destination_chain IN (
'terra',
'terra2',
'evmos'
) THEN utils.udf_hex_to_bech32(
destination_recipient_address,
SUBSTR(
destination_chain,
1,
5
)
)
WHEN destination_chain IN (
'cosmoshub',
'kujira'
) THEN utils.udf_hex_to_bech32(
destination_recipient_address,
SUBSTR(
destination_chain,
1,
6
)
)
WHEN destination_chain IN ('near') THEN COALESCE(
near_address,
destination_recipient_address
)
WHEN destination_chain IN ('algorand') THEN utils.udf_hex_to_algorand(destination_recipient_address)
WHEN destination_chain IN ('polygon') THEN SUBSTR(
destination_recipient_address,
1,
42
)
ELSE destination_recipient_address
END AS destination_chain_receiver,
_id,
_inserted_timestamp
FROM
all_transfers t
LEFT JOIN {{ ref('silver_bridge__wormhole_chain_id_seed') }}
s
ON t.destination_chain_id :: STRING = s.wormhole_chain_id :: STRING
LEFT JOIN base_near n
ON t.destination_recipient_address = n.addr_encoded
WHERE
-- Dedupe by surrogate id, keeping the most recently inserted row.
origin_to_address IS NOT NULL qualify (ROW_NUMBER() over (PARTITION BY _id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,52 +0,0 @@
version: 2
models:
- name: silver_bridge__wormhole_transfers
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_ADDRESS
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3

View File

@ -1,61 +0,0 @@
-- Biswap factory 'PairCreated' events: registry of Biswap V2 pools,
-- deduplicated to one row per pool address.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
-- Split the (0x-prefixed) event data into 32-byte words.
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
-- token0/token1 are indexed topics; addresses sit in the low 20 bytes.
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
utils.udf_hex_to_int(
segmented_data [1] :: STRING
) :: INTEGER AS pool_id,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref ('core__fact_event_logs') }}
WHERE
contract_address = '0x858e3312ed3a876947ea49d572a7c42de08af7ee'
AND topics [0] :: STRING = '0x0d3648bd0f6ba80134a33ba9275ac585d9d315f0ad8355cddefde31afa28d0e9' --PairCreated
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
pool_address,
pool_id,
_log_id,
_inserted_timestamp
FROM
-- Keep only the latest record per pool address.
pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__biswap_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -1,130 +0,0 @@
-- Biswap V2 'Swap' events joined to known Biswap pools, resolving which side of
-- the pair was the input vs output token and the unadjusted amounts.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__biswap_pools') }}
),
swaps_base AS (
SELECT
block_number,
origin_function_signature,
origin_from_address,
origin_to_address,
block_timestamp,
tx_hash,
event_index,
contract_address,
-- Split the (0x-prefixed) event data into 32-byte words:
-- [amount0In, amount1In, amount0Out, amount1Out].
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS amount0In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS amount1In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS amount0Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS amount1Out,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS sender,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS tx_to,
token0,
token1,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON LOWER(
p.pool_address
) = LOWER(
l.contract_address
)
WHERE
-- UniswapV2-style Swap event topic.
topics [0] :: STRING = '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822'
AND tx_succeeded
-- Biswap launch cutoff; ignores anything before the factory existed.
AND l.block_number >= 7665803
AND l.block_timestamp >= '2021-05-20'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
contract_address,
sender,
tx_to,
amount0In,
amount1In,
amount0Out,
amount1Out,
token0,
token1,
-- NOTE(review): the first branch presumably handles double-entry swaps where
-- both sides report an input (e.g. fee-on-transfer tokens) — token1 is then
-- treated as the true input side. Verify against upstream event data.
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN amount1In
WHEN amount0In <> 0 THEN amount0In
WHEN amount1In <> 0 THEN amount1In
END AS amount_in_unadj,
CASE
WHEN amount0Out <> 0 THEN amount0Out
WHEN amount1Out <> 0 THEN amount1Out
END AS amount_out_unadj,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN token1
WHEN amount0In <> 0 THEN token0
WHEN amount1In <> 0 THEN token1
END AS token_in,
CASE
WHEN amount0Out <> 0 THEN token0
WHEN amount1Out <> 0 THEN token1
END AS token_out,
'Swap' AS event_name,
'biswap' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
-- Drop degenerate rows where input and output resolve to the same token.
token_in <> token_out

View File

@ -1,116 +0,0 @@
version: 2
models:
- name: silver_dex__biswap_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 2
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_IN_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SYMBOL_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: SYMBOL_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,57 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'pool_address',
    tags = ['silver_dex','defi','dex','curated']
) }}
-- DODO v1 pool registry: one row per pool, decoded from DODOBirth events
-- emitted by the DODOZoo registry contract.
WITH pool_events AS (
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        contract_address,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        -- address params are the low 20 bytes (40 hex chars) of each word
        CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS newBorn,
        CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS baseToken,
        CONCAT('0x', SUBSTR(segmented_data [2] :: STRING, 25, 40)) AS quoteToken,
        -- surrogate key: unique per log within a transaction
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref ('core__fact_event_logs') }}
    WHERE
        contract_address = '0xca459456a45e300aa7ef447dbb60f87cccb42828' --DODOZoo
        AND topics [0] :: STRING = '0x5c428a2e12ecaa744a080b25b4cda8b86359c82d726575d7d747e07708071f93' --DODOBirth
        AND tx_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    event_index,
    contract_address,
    newBorn AS pool_address,
    baseToken AS base_token,
    quoteToken AS quote_token,
    _log_id AS _id,
    _inserted_timestamp
FROM
    -- keep only the most recently ingested row per pool_address
    pool_events qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
    _inserted_timestamp DESC)) = 1

View File

@ -1,17 +0,0 @@
version: 2
models:
  - name: silver_dex__dodo_v1_pools
    tests:
      # POOL_ADDRESS is the model's incremental unique_key, so it must be unique
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - POOL_ADDRESS
    columns:
      - name: POOL_ADDRESS
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ

View File

@ -1,191 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'block_number',
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_dex','defi','dex','curated']
) }}
-- DODO v1 swaps, decoded from SellBaseToken / BuyBaseToken events on pools
-- registered in silver_dex__dodo_v1_pools. Amounts/tokens are reported from
-- the trader's perspective: token_in is what the trader paid the pool,
-- token_out is what the trader received. Swaps routed through the known
-- DODO proxy contracts are excluded (they are captured at the proxy level).
WITH pools AS (
    SELECT
        pool_address,
        base_token,
        quote_token
    FROM
        {{ ref('silver_dex__dodo_v1_pools') }}
),
proxies AS (
    -- known DODO v1 proxy/router addresses whose events would double count swaps
    SELECT
        '0x8e4842d0570c85ba3805a9508dce7c6a458359d0' AS proxy_address
    UNION ALL
    SELECT
        '0x0596908263ef2724fbfbcafa1c983fcd7a629038'
    UNION ALL
    SELECT
        '0x165ba87e882208100672b6c56f477ee42502c820'
    UNION ALL
    SELECT
        '0xab623fbcaeb522046185051911209f5b2c2a2e1f'
),
sell_base_token AS (
    SELECT
        l.block_number,
        l.block_timestamp,
        l.tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        l.event_index,
        l.contract_address,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
        CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS seller_address,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [0] :: STRING
            )
        ) AS payBase,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [1] :: STRING
            )
        ) AS receiveQuote,
        base_token,
        quote_token,
        -- SellBaseToken: the trader pays base token into the pool and receives
        -- quote token, so trader-perspective in/out is base -> quote. This
        -- mirrors the convention used in buy_base_token below.
        base_token AS tokenIn,
        quote_token AS tokenOut,
        payBase AS amountIn,
        receiveQuote AS amountOut,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
        l
        INNER JOIN pools p
        ON p.pool_address = l.contract_address
    WHERE
        topics [0] :: STRING = '0xd8648b6ac54162763c86fd54bf2005af8ecd2f9cb273a5775921fd7f91e17b2d' --sellBaseToken
        AND seller_address NOT IN (
            SELECT
                proxy_address
            FROM
                proxies
        )
        AND tx_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
buy_base_token AS (
    SELECT
        l.block_number,
        l.block_timestamp,
        l.tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        l.event_index,
        l.contract_address,
        regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
        CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS buyer_address,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [0] :: STRING
            )
        ) AS receiveBase,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [1] :: STRING
            )
        ) AS payQuote,
        base_token,
        quote_token,
        -- BuyBaseToken: the trader pays quote token and receives base token
        quote_token AS tokenIn,
        base_token AS tokenOut,
        payQuote AS amountIn,
        receiveBase AS amountOut,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
        l
        INNER JOIN pools p
        ON p.pool_address = l.contract_address
    WHERE
        topics [0] :: STRING = '0xe93ad76094f247c0dafc1c61adc2187de1ac2738f7a3b49cb20b2263420251a3' --buyBaseToken
        AND buyer_address NOT IN (
            SELECT
                proxy_address
            FROM
                proxies
        )
        AND tx_succeeded

{% if is_incremental() %}
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    event_index,
    contract_address,
    seller_address AS sender,
    origin_from_address AS tx_to,
    tokenIn AS token_in,
    tokenOut AS token_out,
    amountIn AS amount_in_unadj,
    amountOut AS amount_out_unadj,
    'SellBaseToken' AS event_name,
    'dodo-v1' AS platform,
    _log_id,
    _inserted_timestamp
FROM
    sell_base_token
UNION ALL
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    event_index,
    contract_address,
    buyer_address AS sender,
    origin_from_address AS tx_to,
    tokenIn AS token_in,
    tokenOut AS token_out,
    amountIn AS amount_in_unadj,
    amountOut AS amount_out_unadj,
    'BuyBaseToken' AS event_name,
    'dodo-v1' AS platform,
    _log_id,
    _inserted_timestamp
FROM
    buy_base_token

View File

@ -1,56 +0,0 @@
version: 2
models:
  - name: silver_dex__dodo_v1_swaps
    tests:
      # _LOG_ID (tx_hash-event_index) uniquely identifies a decoded swap event
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: AMOUNT_IN_UNADJ
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - decimal
                - float
                - number
      - name: AMOUNT_OUT_UNADJ
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - decimal
                - float
                - number
      - name: TOKEN_IN
        tests:
          - not_null
          # hex address: 0x prefix followed by hex digits
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOKEN_OUT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          # freshness check: expect rows within the last day
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 1
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: EVENT_INDEX
        tests:
          - not_null
      - name: SENDER
        tests:
          - not_null
      - name: TX_TO
        tests:
          - not_null
      - name: TX_HASH
        tests:
          - not_null

View File

@ -1,72 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'pool_address',
    tags = ['silver_dex','defi','dex','curated']
) }}
-- DODO v2 pool creations, decoded from NewDPP / NewDSP / NewDVM events
-- emitted by the DPP, DPP-advanced, DSP and DVM factory contracts.
WITH pools AS (
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        contract_address,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS baseToken,
        CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS quoteToken,
        CONCAT('0x', SUBSTR(segmented_data [2] :: STRING, 25, 40)) AS creator,
        CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS pool_address,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref ('core__fact_event_logs') }}
    WHERE
        contract_address IN (
            '0xafe0a75dffb395eaabd0a7e1bbbd0b11f8609eef',
            --dpp - factory,
            '0xd9cac3d964327e47399aebd8e1e6dcc4c251daae',
            --dpp advanced - private pool factory
            '0x0fb9815938ad069bf90e14fe6c596c514bede767',
            --dsp - factory
            '0x790b4a80fb1094589a3c0efc8740aa9b0c1733fb' --dvm - factory
        )
        AND topics [0] :: STRING IN (
            '0x8494fe594cd5087021d4b11758a2bbc7be28a430e94f2b268d668e5991ed3b8a',
            --NewDPP
            '0xbc1083a2c1c5ef31e13fb436953d22b47880cf7db279c2c5666b16083afd6b9d',
            --NewDSP
            '0xaf5c5f12a80fc937520df6fcaed66262a4cc775e0f3fceaf7a7cfe476d9a751d' --NewDVM
        )
        AND tx_succeeded

{% if is_incremental() %}
-- NOTE(review): unlike the sibling pool models, this incremental filter has no
-- 7-day SYSDATE() lookback cap — confirm that is intentional.
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    event_index,
    contract_address,
    baseToken AS base_token,
    quoteToken AS quote_token,
    creator,
    pool_address,
    _log_id,
    _inserted_timestamp
FROM
    -- keep only the most recently ingested row per pool_address
    pools qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
    _inserted_timestamp DESC)) = 1

View File

@ -1,17 +0,0 @@
version: 2
models:
  - name: silver_dex__dodo_v2_pools
    tests:
      # POOL_ADDRESS is the model's incremental unique_key, so it must be unique
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - POOL_ADDRESS
    columns:
      - name: POOL_ADDRESS
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ

View File

@ -1,133 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'block_number',
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_dex','defi','dex','curated']
) }}
-- DODO v2 swaps, decoded from DODOSwap events on pools created by the v2
-- factories (silver_dex__dodo_v2_pools). Swaps whose trader is a known DODO
-- proxy are excluded to avoid double counting router-level events.
WITH pools AS (
    SELECT
        DISTINCT pool_address
    FROM
        {{ ref('silver_dex__dodo_v2_pools') }}
),
proxies AS (
    -- known DODO proxy/router addresses
    SELECT
        '0x8e4842d0570c85ba3805a9508dce7c6a458359d0' AS proxy_address
    UNION ALL
    SELECT
        '0x0596908263ef2724fbfbcafa1c983fcd7a629038'
    UNION ALL
    SELECT
        '0x165ba87e882208100672b6c56f477ee42502c820'
    UNION ALL
    SELECT
        '0xab623fbcaeb522046185051911209f5b2c2a2e1f'
),
swaps_base AS (
    SELECT
        l.block_number,
        l.block_timestamp,
        l.tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        l.event_index,
        l.contract_address,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
        -- DODOSwap payload layout: fromToken, toToken, fromAmount, toAmount, trader, receiver
        CONCAT(
            '0x',
            SUBSTR(
                segmented_data [0] :: STRING,
                25,
                40
            )
        ) AS fromToken,
        CONCAT(
            '0x',
            SUBSTR(
                segmented_data [1] :: STRING,
                25,
                40
            )
        ) AS toToken,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [2] :: STRING
            )
        ) AS fromAmount,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [3] :: STRING
            )
        ) AS toAmount,
        CONCAT(
            '0x',
            SUBSTR(
                segmented_data [4] :: STRING,
                25,
                40
            )
        ) AS trader_address,
        CONCAT(
            '0x',
            SUBSTR(
                segmented_data [5] :: STRING,
                25,
                40
            )
        ) AS receiver_address,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
        l
        INNER JOIN pools p
        ON l.contract_address = p.pool_address
    WHERE
        l.topics [0] :: STRING = '0xc2c0245e056d5fb095f04cd6373bc770802ebd1e6c918eb78fdef843cdb37b0f' --dodoswap
        AND trader_address NOT IN (
            SELECT
                proxy_address
            FROM
                proxies
        )
        AND tx_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    event_index,
    contract_address,
    fromToken AS token_in,
    toToken AS token_out,
    fromAmount AS amount_in_unadj,
    toAmount AS amount_out_unadj,
    trader_address AS sender,
    receiver_address AS tx_to,
    'DodoSwap' AS event_name,
    'dodo-v2' AS platform,
    _log_id,
    _inserted_timestamp
FROM
    swaps_base

View File

@ -1,56 +0,0 @@
version: 2
models:
  - name: silver_dex__dodo_v2_swaps
    tests:
      # _LOG_ID (tx_hash-event_index) uniquely identifies a decoded swap event
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: AMOUNT_IN_UNADJ
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - decimal
                - float
                - number
      - name: AMOUNT_OUT_UNADJ
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - decimal
                - float
                - number
      - name: TOKEN_IN
        tests:
          - not_null
          # hex address: 0x prefix followed by hex digits
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOKEN_OUT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          # freshness check: expect rows within the last day
          - dbt_expectations.expect_row_values_to_have_recent_data:
              datepart: day
              interval: 1
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_LTZ
                - TIMESTAMP_NTZ
      - name: EVENT_INDEX
        tests:
          - not_null
      - name: SENDER
        tests:
          - not_null
      - name: TX_TO
        tests:
          - not_null
      - name: TX_HASH
        tests:
          - not_null

View File

@ -1,65 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'pool_address',
    tags = ['silver_dex','defi','dex','curated']
) }}
-- Fraxswap pairs, decoded from PairCreated events emitted by the v1 and v2
-- factory contracts (UniswapV2-style factory event layout).
WITH pool_creation AS (
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        contract_address AS factory_address,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        -- token0/token1 are indexed params, so they come from topics
        CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
        CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
        CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
        utils.udf_hex_to_int(
            segmented_data [1] :: STRING
        ) :: INTEGER AS pool_id,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref ('core__fact_event_logs') }}
    WHERE
        contract_address IN (
            '0x5ca135cb8527d76e932f34b5145575f9d8cbe08e',
            --v1 factory
            '0xf89e6ca06121b6d4370f4b196ae458e8b969a011' --v2 factory
        )
        AND topics [0] :: STRING = '0x0d3648bd0f6ba80134a33ba9275ac585d9d315f0ad8355cddefde31afa28d0e9' --pairCreated
        AND tx_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    factory_address,
    event_index,
    token0,
    token1,
    pool_address,
    pool_id,
    _log_id,
    _inserted_timestamp
FROM
    -- keep only the most recently ingested row per pool_address
    pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
    _inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
  - name: silver_dex__fraxswap_pools
    tests:
      # POOL_ADDRESS is the model's incremental unique_key, so it must be unique
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - POOL_ADDRESS
    columns:
      - name: POOL_ADDRESS
        tests:
          - not_null
      - name: TOKEN0
        tests:
          - not_null
      - name: TOKEN1
        tests:
          - not_null
      - name: _INSERTED_TIMESTAMP
        tests:
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
                - TIMESTAMP_LTZ

View File

@ -1,124 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'block_number',
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_dex','defi','dex','curated']
) }}
-- Fraxswap swaps, decoded from UniswapV2-style Swap events on pairs tracked
-- in silver_dex__fraxswap_pools. The event reports amount0In/amount1In/
-- amount0Out/amount1Out; the CASE logic below infers the single in/out
-- token and amount for the swap.
WITH pools AS (
    SELECT
        pool_address,
        token0,
        token1
    FROM
        {{ ref('silver_dex__fraxswap_pools') }}
),
swaps_base AS (
    SELECT
        l.block_number,
        l.block_timestamp,
        l.tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        l.event_index,
        l.contract_address,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
        -- sender/to are indexed params, so they come from topics
        CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS sender_address,
        CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS to_address,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [0] :: STRING
            )
        ) AS amount0In,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [1] :: STRING
            )
        ) AS amount1In,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [2] :: STRING
            )
        ) AS amount0Out,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [3] :: STRING
            )
        ) AS amount1Out,
        token0,
        token1,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
        l
        INNER JOIN pools
        ON l.contract_address = pool_address
    WHERE
        l.topics [0] :: STRING = '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822' --Swap
        AND tx_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    event_index,
    contract_address,
    sender_address AS sender,
    to_address AS tx_to,
    amount0In,
    amount1In,
    amount0Out,
    amount1Out,
    token0,
    token1,
    -- when both sides report an "in" amount and token0 also flows out,
    -- treat token1 as the true input; otherwise take whichever side is nonzero
    CASE
        WHEN amount0In <> 0
        AND amount1In <> 0
        AND amount0Out <> 0 THEN amount1In
        WHEN amount0In <> 0 THEN amount0In
        WHEN amount1In <> 0 THEN amount1In
    END AS amount_in_unadj,
    CASE
        WHEN amount0Out <> 0 THEN amount0Out
        WHEN amount1Out <> 0 THEN amount1Out
    END AS amount_out_unadj,
    CASE
        WHEN amount0In <> 0
        AND amount1In <> 0
        AND amount0Out <> 0 THEN token1
        WHEN amount0In <> 0 THEN token0
        WHEN amount1In <> 0 THEN token1
    END AS token_in,
    CASE
        WHEN amount0Out <> 0 THEN token0
        WHEN amount1Out <> 0 THEN token1
    END AS token_out,
    'Swap' AS event_name,
    'fraxswap' AS platform,
    _log_id,
    _inserted_timestamp
FROM
    swaps_base
WHERE
    -- drop degenerate rows where the inferred input and output token coincide
    token_in <> token_out

View File

@ -1,81 +0,0 @@
version: 2
models:
  - name: silver_dex__fraxswap_swaps
    tests:
      # _LOG_ID (tx_hash-event_index) uniquely identifies a decoded swap event
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
                - TIMESTAMP_LTZ
      - name: TX_HASH
        tests:
          - not_null
          # hex string: 0x prefix followed by hex digits
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: CONTRACT_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOKEN_IN
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOKEN_OUT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SENDER
        tests:
          # only enforced for recent history
          - not_null:
              where: BLOCK_TIMESTAMP > '2021-08-01'
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_TO
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: PLATFORM
        tests:
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - STRING
                - VARCHAR
      - name: EVENT_INDEX
        tests:
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: _LOG_ID
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+

View File

@ -1,51 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'pool_address',
    tags = ['silver_dex','defi','dex','curated']
) }}
-- Hashflow pools, identified as contracts created (via create/create2 traces)
-- by the known Hashflow deployer addresses.
WITH contract_deployments AS (
    SELECT
        tx_hash,
        block_number,
        block_timestamp,
        from_address AS deployer_address,
        to_address AS contract_address,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_traces') }}
    WHERE
        from_address IN (
            '0x63ae536fec0b57bdeb1fd6a893191b4239f61bff',
            '0xa98242820ebf3a405d265ccd22a4ea8f64afb281',
            '0xb5574750a786a37e300a916974ecd63f93fc6754'
        )
        -- create / create2 traces mark contract deployments
        AND TYPE ILIKE 'create%'
        AND tx_succeeded
        AND trace_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}

-- keep the earliest deployment trace per contract address
qualify(ROW_NUMBER() over(PARTITION BY to_address
ORDER BY
    block_timestamp ASC)) = 1
)
SELECT
    tx_hash,
    block_number,
    block_timestamp,
    deployer_address,
    contract_address AS pool_address,
    _inserted_timestamp
FROM
    contract_deployments

View File

@ -1,11 +0,0 @@
version: 2
models:
  - name: silver_dex__hashflow_pools
    tests:
      # POOL_ADDRESS is the model's incremental unique_key, so it must be unique
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - POOL_ADDRESS
    columns:
      - name: POOL_ADDRESS
        tests:
          - not_null

View File

@ -1,185 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'block_number',
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_dex','defi','dex','curated']
) }}
-- Hashflow (v1/v2) swaps on pools from silver_dex__hashflow_pools. Two swap
-- event signatures are decoded; their payload layouts differ by one word
-- (field offsets shifted by 1), hence the two near-identical CTEs.
WITH pools AS (
    SELECT
        pool_address
    FROM
        {{ ref('silver_dex__hashflow_pools') }}
),
router_swaps_base AS (
    SELECT
        l.block_number,
        l.block_timestamp,
        l.tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        l.event_index,
        l.contract_address,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
        CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS account_address,
        CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS tokenIn,
        CONCAT('0x', SUBSTR(segmented_data [4] :: STRING, 25, 40)) AS tokenOut,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [5] :: STRING
            )
        ) AS amountIn,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [6] :: STRING
            )
        ) AS amountOut,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
        l
        INNER JOIN pools p
        ON l.contract_address = p.pool_address
    WHERE
        l.topics [0] :: STRING = '0xb709ddcc6550418e9b89df1f4938071eeaa3f6376309904c77e15d46b16066f5' --swap
        AND tx_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
swaps_base AS (
    SELECT
        l.block_number,
        l.block_timestamp,
        l.tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        l.event_index,
        l.contract_address,
        regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
        -- same fields as router_swaps_base but at offsets one word earlier
        CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS account_address,
        CONCAT('0x', SUBSTR(segmented_data [2] :: STRING, 25, 40)) AS tokenIn,
        CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS tokenOut,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [4] :: STRING
            )
        ) AS amountIn,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [5] :: STRING
            )
        ) AS amountOut,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
        l
        INNER JOIN pools p
        ON l.contract_address = p.pool_address
    WHERE
        l.topics [0] :: STRING = '0x8cf3dec1929508e5677d7db003124e74802bfba7250a572205a9986d86ca9f1e' --swap
        AND tx_succeeded

{% if is_incremental() %}
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
FINAL AS (
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        event_index,
        contract_address,
        origin_from_address AS sender,
        account_address AS tx_to,
        tokenIn AS token_in,
        tokenOut AS token_out,
        amountIn AS amount_in_unadj,
        amountOut AS amount_out_unadj,
        'Swap' AS event_name,
        'hashflow' AS platform,
        _log_id,
        _inserted_timestamp
    FROM
        router_swaps_base
    UNION ALL
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        origin_function_signature,
        origin_from_address,
        origin_to_address,
        event_index,
        contract_address,
        origin_from_address AS sender,
        account_address AS tx_to,
        tokenIn AS token_in,
        tokenOut AS token_out,
        amountIn AS amount_in_unadj,
        amountOut AS amount_out_unadj,
        'Swap' AS event_name,
        'hashflow' AS platform,
        _log_id,
        _inserted_timestamp
    FROM
        swaps_base
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    event_index,
    contract_address,
    sender,
    tx_to,
    -- map the zero-address native-asset placeholder to the wrapped native
    -- token address (presumably WBNB on BSC — TODO confirm chain context)
    CASE
        WHEN token_in = '0x0000000000000000000000000000000000000000' THEN '0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c'
        ELSE token_in
    END AS token_in,
    CASE
        WHEN token_out = '0x0000000000000000000000000000000000000000' THEN '0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c'
        ELSE token_out
    END AS token_out,
    amount_in_unadj,
    amount_out_unadj,
    event_name,
    platform,
    _log_id,
    _inserted_timestamp
FROM
    FINAL

View File

@ -1,81 +0,0 @@
version: 2
models:
  - name: silver_dex__hashflow_swaps
    tests:
      # _LOG_ID (tx_hash-event_index) uniquely identifies a decoded swap event
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
                - TIMESTAMP_LTZ
      - name: TX_HASH
        tests:
          - not_null
          # hex string: 0x prefix followed by hex digits
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: CONTRACT_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOKEN_IN
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOKEN_OUT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SENDER
        tests:
          # only enforced for recent history
          - not_null:
              where: BLOCK_TIMESTAMP > '2021-08-01'
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_TO
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: PLATFORM
        tests:
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - STRING
                - VARCHAR
      - name: EVENT_INDEX
        tests:
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: _LOG_ID
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+

View File

@ -1,51 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "pool_address",
    tags = ['silver_dex','defi','dex','curated']
) }}
-- Hashflow v3 pools, decoded from pool-creation events emitted by the
-- Hashflow v3 factory contract.
WITH contract_deployments AS (
    SELECT
        tx_hash,
        block_number,
        block_timestamp,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        origin_from_address AS deployer_address,
        CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
    WHERE
        contract_address = '0xde828fdc3f497f16416d1bb645261c7c6a62dab5'
        AND topics [0] :: STRING = '0xdbd2a1ea6808362e6adbec4db4969cbc11e3b0b28fb6c74cb342defaaf1daada'
        AND tx_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    tx_hash,
    block_number,
    block_timestamp,
    deployer_address,
    pool_address,
    _log_id,
    _inserted_timestamp
FROM
    -- keep only the most recently ingested row per pool_address
    contract_deployments qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
    _inserted_timestamp DESC)) = 1

View File

@ -1,11 +0,0 @@
version: 2
models:
  - name: silver_dex__hashflow_v3_pools
    tests:
      # POOL_ADDRESS is the model's incremental unique_key, so it must be unique
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - POOL_ADDRESS
    columns:
      - name: POOL_ADDRESS
        tests:
          - not_null

View File

@ -1,97 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = "block_number",
    cluster_by = ['block_timestamp::DATE'],
    tags = ['silver_dex','defi','dex','curated']
) }}
-- Hashflow v3 swaps, decoded from Trade events on pools tracked in
-- silver_dex__hashflow_v3_pools.
WITH pools AS (
    SELECT
        pool_address
    FROM
        {{ ref('silver_dex__hashflow_v3_pools') }}
),
swaps AS (
    SELECT
        l.block_number,
        l.block_timestamp,
        l.tx_hash,
        l.origin_function_signature,
        l.origin_from_address,
        l.origin_to_address,
        l.event_index,
        contract_address,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        -- Trade payload layout: trader, effectiveTrader, txid, tokenIn, tokenOut, amountIn, amountOut
        CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS trader_address,
        CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS effective_trader_address,
        -- txid is a full 32-byte value, so no substring is taken
        CONCAT(
            '0x',
            segmented_data [2] :: STRING
        ) AS txid,
        CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS tokenIn,
        CONCAT('0x', SUBSTR(segmented_data [4] :: STRING, 25, 40)) AS tokenOut,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [5] :: STRING
            )
        ) AS amountIn,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [6] :: STRING
            )
        ) AS amountOut,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref('core__fact_event_logs') }}
        l
        INNER JOIN pools p
        ON l.contract_address = p.pool_address
    WHERE
        l.topics [0] :: STRING = '0x34f57786fb01682fb4eec88d340387ef01a168fe345ea5b76f709d4e560c10eb' --Trade
        AND tx_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    origin_function_signature,
    origin_from_address,
    origin_to_address,
    event_index,
    contract_address,
    effective_trader_address AS sender,
    trader_address AS tx_to,
    txid,
    -- map the zero-address native-asset placeholder to the wrapped native
    -- token address (presumably WBNB on BSC — TODO confirm chain context)
    CASE
        WHEN tokenIn = '0x0000000000000000000000000000000000000000' THEN '0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c'
        ELSE tokenIn
    END AS token_in,
    CASE
        WHEN tokenOut = '0x0000000000000000000000000000000000000000' THEN '0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c'
        ELSE tokenOut
    END AS token_out,
    amountIn AS amount_in_unadj,
    amountOut AS amount_out_unadj,
    'Trade' AS event_name,
    'hashflow-v3' AS platform,
    _log_id,
    _inserted_timestamp
FROM
    swaps

View File

@ -1,81 +0,0 @@
version: 2
models:
  - name: silver_dex__hashflow_v3_swaps
    tests:
      # _LOG_ID (tx_hash-event_index) uniquely identifies a decoded swap event
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - _LOG_ID
    columns:
      - name: BLOCK_NUMBER
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: BLOCK_TIMESTAMP
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
                - TIMESTAMP_LTZ
      - name: TX_HASH
        tests:
          - not_null
          # hex string: 0x prefix followed by hex digits
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: CONTRACT_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOKEN_IN
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TOKEN_OUT
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: SENDER
        tests:
          # only enforced for recent history
          - not_null:
              where: BLOCK_TIMESTAMP > '2021-08-01'
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: TX_TO
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: PLATFORM
        tests:
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - STRING
                - VARCHAR
      - name: EVENT_INDEX
        tests:
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - NUMBER
                - FLOAT
      - name: _LOG_ID
        tests:
          - not_null
      - name: ORIGIN_FUNCTION_SIGNATURE
        tests:
          - not_null
      - name: ORIGIN_FROM_ADDRESS
        tests:
          - not_null
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+
      - name: ORIGIN_TO_ADDRESS
        tests:
          - dbt_expectations.expect_column_values_to_match_regex:
              regex: 0[xX][0-9a-fA-F]+

View File

@ -1,69 +0,0 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'delete+insert',
    unique_key = 'pool_address',
    tags = ['silver_dex','defi','dex','curated']
) }}
-- KyberSwap Classic (v1) dynamic-fee pools, decoded from pool-created events
-- emitted by the dynamic fee factory contract.
WITH pool_creation AS (
    SELECT
        block_number,
        block_timestamp,
        tx_hash,
        event_index,
        contract_address,
        -- split the unindexed event payload into 32-byte (64 hex char) words
        regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
        -- token0/token1 are indexed params, so they come from topics
        CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
        CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
        CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [1] :: STRING
            )
        ) AS ampBps,
        TRY_TO_NUMBER(
            utils.udf_hex_to_int(
                segmented_data [2] :: STRING
            )
        ) AS totalPool,
        CONCAT(
            tx_hash :: STRING,
            '-',
            event_index :: STRING
        ) AS _log_id,
        modified_timestamp AS _inserted_timestamp
    FROM
        {{ ref ('core__fact_event_logs') }}
    WHERE
        contract_address = '0x878dfe971d44e9122048308301f540910bbd934c' --dynamic fee factory
        AND topics [0] :: STRING = '0xfc574402c445e75f2b79b67884ff9c662244dce454c5ae68935fcd0bebb7c8ff' --created pool
        AND tx_succeeded

{% if is_incremental() %}
-- 12h overlap guards against late-arriving rows; 7d cap bounds the rescan window
AND _inserted_timestamp >= (
    SELECT
        MAX(_inserted_timestamp) - INTERVAL '12 hours'
    FROM
        {{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
    block_number,
    block_timestamp,
    tx_hash,
    contract_address,
    event_index,
    token0,
    token1,
    pool_address,
    ampBps AS amp_bps,
    totalPool AS total_pool,
    _log_id,
    _inserted_timestamp
FROM
    -- keep only the most recently ingested row per pool_address
    pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
    _inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v1_dynamic_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -1,130 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__kyberswap_v1_dynamic_pools') }}
),
swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS sender_address,
CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS to_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS amount0In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS amount1In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS amount0Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS amount1Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [4] :: STRING
)
) AS feeInPrecision,
token0,
token1,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON p.pool_address = l.contract_address
WHERE
l.topics [0] :: STRING = '0x606ecd02b3e3b4778f8e97b2e03351de14224efaa5fa64e62200afc9395c2499' --Dynamic Swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
sender_address AS sender,
to_address AS tx_to,
amount0In,
amount1In,
amount0Out,
amount1Out,
feeInPrecision AS fee_in_precision,
token0,
token1,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN amount1In
WHEN amount0In <> 0 THEN amount0In
WHEN amount1In <> 0 THEN amount1In
END AS amount_in_unadj,
CASE
WHEN amount0Out <> 0 THEN amount0Out
WHEN amount1Out <> 0 THEN amount1Out
END AS amount_out_unadj,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN token1
WHEN amount0In <> 0 THEN token0
WHEN amount1In <> 0 THEN token1
END AS token_in,
CASE
WHEN amount0Out <> 0 THEN token0
WHEN amount1Out <> 0 THEN token1
END AS token_out,
'Dynamic Swap' AS event_name,
'kyberswap-v1' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
token_in <> token_out

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v1_dynamic_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,75 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS ampBps,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS feeUnits,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS totalPool,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref ('core__fact_event_logs') }}
WHERE
contract_address = LOWER('0x1c758aF0688502e49140230F6b0EBd376d429be5') --static pool factory
AND topics [0] :: STRING = '0xb6bce363b712c921bead4bcc977289440eb6172eb89e258e3a25bd49ca806de6' --create pool
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
pool_address,
ampBps AS amp_bps,
feeUnits AS fee_units,
totalPool AS total_pool,
_log_id,
_inserted_timestamp
FROM
pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v1_static_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -1,130 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__kyberswap_v1_static_pools') }}
),
swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS sender_address,
CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS to_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS amount0In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS amount1In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS amount0Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS amount1Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [4] :: STRING
)
) AS feeInPrecision,
token0,
token1,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON p.pool_address = l.contract_address
WHERE
l.topics [0] :: STRING = '0x606ecd02b3e3b4778f8e97b2e03351de14224efaa5fa64e62200afc9395c2499' -- static swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
sender_address AS sender,
to_address AS tx_to,
amount0In,
amount1In,
amount0Out,
amount1Out,
feeInPrecision AS fee_in_precision,
token0,
token1,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN amount1In
WHEN amount0In <> 0 THEN amount0In
WHEN amount1In <> 0 THEN amount1In
END AS amount_in_unadj,
CASE
WHEN amount0Out <> 0 THEN amount0Out
WHEN amount1Out <> 0 THEN amount1Out
END AS amount_out_unadj,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN token1
WHEN amount0In <> 0 THEN token0
WHEN amount1In <> 0 THEN token1
END AS token_in,
CASE
WHEN amount0Out <> 0 THEN token0
WHEN amount1Out <> 0 THEN token1
END AS token_out,
'Static Swap' AS event_name,
'kyberswap-v1' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
token_in <> token_out

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v1_static_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,65 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
TRY_TO_NUMBER(utils.udf_hex_to_int(topics [3] :: STRING)) AS swapFeeUnits,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS tickDistance,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref ('core__fact_event_logs') }}
WHERE
contract_address = '0x5f1dddbf348ac2fbe22a163e30f99f9ece3dd50a' --Elastic Pool Deployer
AND topics [0] :: STRING = '0x783cca1c0412dd0d695e784568c96da2e9c22ff989357a2e8b1d9b2b4e6b7118' --Create pool
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
swapFeeUnits AS swap_fee_units,
tickDistance AS tick_distance,
pool_address,
_log_id,
_inserted_timestamp
FROM
pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v2_elastic_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -1,125 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__kyberswap_v2_elastic_pools') }}
),
swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS sender_address,
CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS reipient_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
segmented_data [0] :: STRING
)
) AS deltaQty0,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
segmented_data [1] :: STRING
)
) AS deltaQty1,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS sqrtP,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS liquidity,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
segmented_data [4] :: STRING
)
) AS currentTick,
ABS(GREATEST(deltaQty0, deltaQty1)) AS amountOut,
ABS(LEAST(deltaQty0, deltaQty1)) AS amountIn,
token0,
token1,
CASE
WHEN deltaQty0 < 0 THEN token0
ELSE token1
END AS token_in,
CASE
WHEN deltaQty0 > 0 THEN token0
ELSE token1
END AS token_out,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON p.pool_address = l.contract_address
WHERE
topics [0] :: STRING = '0xc42079f94a6350d7e6235f29174924f928cc2ac818eb64fed8004e115fbcca67' -- elastic swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
sender_address AS sender,
reipient_address AS tx_to,
deltaQty0 AS delta_qty0,
deltaQty1 AS delta_qty1,
sqrtP AS sqrt_p,
liquidity,
currentTick AS current_tick,
amountIn AS amount_in_unadj,
amountOut AS amount_out_unadj,
token0,
token1,
token_in,
token_out,
'Elastic Swap' AS event_name,
'kyberswap-v2' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
token_in <> token_out

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v2_elastic_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,117 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
ARRAY_SIZE(segmented_data) AS data_count,
CASE
WHEN data_count = 6 THEN CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40))
END AS sender_address,
CASE
WHEN data_count = 6 THEN CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40))
WHEN data_count = 5 THEN CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40))
END AS tokenIn,
CASE
WHEN data_count = 6 THEN CONCAT('0x', SUBSTR(segmented_data [2] :: STRING, 25, 40))
WHEN data_count = 5 THEN CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40))
END AS tokenOut,
CASE
WHEN data_count = 6 THEN TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
)
WHEN data_count = 5 THEN TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
)
END AS amountIn,
CASE
WHEN data_count = 6 THEN TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [4] :: STRING
)
)
WHEN data_count = 5 THEN TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
)
END AS amountOut,
CASE
WHEN data_count = 6 THEN TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [5] :: STRING
)
)
WHEN data_count = 5 THEN TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [4] :: STRING
)
)
END AS fee,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
WHERE
contract_address = '0xa5abfb56a78d2bd4689b25b8a77fd49bb0675874' --router
AND topics [0] :: STRING = '0xd6d34547c69c5ee3d2667625c188acf1006abb93e0ee7cf03925c67cf7760413' --swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
COALESCE(
sender_address,
origin_from_address
) AS sender,
origin_from_address AS tx_to,
tokenIn AS token_in,
tokenOut AS token_out,
amountIn AS amount_in_unadj,
amountOut AS amount_out_unadj,
fee,
'Swap' AS event_name,
'level-finance' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__levelfi_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,61 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
utils.udf_hex_to_int(
segmented_data [1] :: STRING
) :: INTEGER AS pool_id,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref ('core__fact_event_logs') }}
WHERE
contract_address = '0xbcfccbde45ce874adcb698cc183debcf17952812' --factory
AND topics [0] :: STRING = '0x0d3648bd0f6ba80134a33ba9275ac585d9d315f0ad8355cddefde31afa28d0e9' --PairCreated
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
pool_address,
pool_id,
_log_id,
_inserted_timestamp
FROM
pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__pancakeswap_v1_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -1,123 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__pancakeswap_v1_pools') }}
),
swaps_base AS (
SELECT
block_number,
origin_function_signature,
origin_from_address,
origin_to_address,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS amount0In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS amount1In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS amount0Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS amount1Out,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS sender,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS tx_to,
token0,
token1,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
INNER JOIN pools p
ON p.pool_address = contract_address
WHERE
topics [0] :: STRING = '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
contract_address,
sender,
tx_to,
amount0In,
amount1In,
amount0Out,
amount1Out,
token0,
token1,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN amount1In
WHEN amount0In <> 0 THEN amount0In
WHEN amount1In <> 0 THEN amount1In
END AS amount_in_unadj,
CASE
WHEN amount0Out <> 0 THEN amount0Out
WHEN amount1Out <> 0 THEN amount1Out
END AS amount_out_unadj,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN token1
WHEN amount0In <> 0 THEN token0
WHEN amount1In <> 0 THEN token1
END AS token_in,
CASE
WHEN amount0Out <> 0 THEN token0
WHEN amount1Out <> 0 THEN token1
END AS token_out,
'Swap' AS event_name,
'pancakeswap-v1' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
token_in <> token_out

View File

@ -1,116 +0,0 @@
version: 2
models:
- name: silver_dex__pancakeswap_v1_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 2
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_IN_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SYMBOL_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: SYMBOL_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,65 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
utils.udf_hex_to_int(
segmented_data [1] :: STRING
) :: INTEGER AS pool_id,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref ('core__fact_event_logs') }}
WHERE
contract_address IN (
'0xca143ce32fe78f1f7019d7d551a6402fc5350c73',
'0x7b13d1d2a1fa28b16862ebac6e3c52fa9c8d753e',
'0x1f830fb91094a0e87c0a80150aa0af3805456090'
) --factory
AND topics [0] :: STRING = '0x0d3648bd0f6ba80134a33ba9275ac585d9d315f0ad8355cddefde31afa28d0e9' --PairCreated
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
pool_address,
pool_id,
_log_id,
_inserted_timestamp
FROM
pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__pancakeswap_v2_amm_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -1,123 +0,0 @@
-- Silver model: PancakeSwap V2-style AMM swap events on BSC.
-- Joins Swap logs to the curated pool registry and resolves the
-- in/out token + amount from the four amount fields of the V2 Swap event.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
-- known V2 pools with their token pair (limits the log scan to real pools)
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__pancakeswap_v2_amm_pools') }}
),
swaps_base AS (
SELECT
block_number,
origin_function_signature,
origin_from_address,
origin_to_address,
block_timestamp,
tx_hash,
event_index,
contract_address,
-- split the hex payload into 32-byte words: [amount0In, amount1In, amount0Out, amount1Out]
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS amount0In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS amount1In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS amount0Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS amount1Out,
-- indexed topics: initiating router/sender and the swap recipient
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS sender,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS tx_to,
token0,
token1,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
INNER JOIN pools p
ON p.pool_address = contract_address
WHERE
topics [0] :: STRING = '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822'
AND tx_succeeded
{% if is_incremental() %}
-- incremental runs: 12h lookback overlap, hard-capped at 7 days of history
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
contract_address,
sender,
tx_to,
amount0In,
amount1In,
amount0Out,
amount1Out,
token0,
token1,
-- resolve the input amount; the first branch handles events where both
-- tokens show an "in" amount (e.g. flash-swap style fills) before the
-- simple single-sided cases
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN amount1In
WHEN amount0In <> 0 THEN amount0In
WHEN amount1In <> 0 THEN amount1In
END AS amount_in_unadj,
CASE
WHEN amount0Out <> 0 THEN amount0Out
WHEN amount1Out <> 0 THEN amount1Out
END AS amount_out_unadj,
-- token resolution mirrors the amount CASE branches above
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN token1
WHEN amount0In <> 0 THEN token0
WHEN amount1In <> 0 THEN token1
END AS token_in,
CASE
WHEN amount0Out <> 0 THEN token0
WHEN amount1Out <> 0 THEN token1
END AS token_out,
'Swap' AS event_name,
'pancakeswap-v2' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
-- drop degenerate same-token rows produced by the CASE fallbacks
token_in <> token_out

View File

@ -1,116 +0,0 @@
# dbt schema tests for the PancakeSwap V2 AMM swaps silver model.
# NOTE(review): AMOUNT_IN, AMOUNT_OUT, SYMBOL_IN and SYMBOL_OUT are tested
# below but do not appear in the silver model's SELECT (it emits the *_UNADJ
# columns only) — presumably copied from a gold-layer schema; verify.
version: 2
models:
- name: silver_dex__pancakeswap_v2_amm_swaps
tests:
# _LOG_ID (tx_hash-event_index) is the natural unique key for event rows
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
# freshness check: expect swaps within the last 2 days
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 2
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_IN_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SYMBOL_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: SYMBOL_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,104 +0,0 @@
-- Silver model: PancakeSwap V2 market-maker (MM) pool swaps on BSC.
-- Decodes Swap events from the MM router, normalizing the native-token
-- placeholder address to wrapped BNB.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
-- split the hex payload into 32-byte words
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
-- indexed topics: the swapping user and the market-maker address
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS user_address,
CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS mm_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS nonce,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS mmTreasury,
CONCAT('0x', SUBSTR(segmented_data [2] :: STRING, 25, 40)) AS baseToken1,
CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS quoteToken1,
-- 0xeeee...eeee is the native-BNB placeholder; map it to WBNB
CASE
WHEN baseToken1 = '0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee' THEN '0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c'
ELSE baseToken1
END AS baseToken,
CASE
WHEN quoteToken1 = '0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee' THEN '0xbb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c'
ELSE quoteToken1
END AS quoteToken,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [4] :: STRING
)
) AS baseTokenAmount,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [5] :: STRING
)
) AS quoteTokenAmount,
-- base leg is treated as the user's input, quote leg as the output
baseToken AS token_in,
quoteToken AS token_out,
baseTokenAmount AS token_in_amount,
quoteTokenAmount AS token_out_amount,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
WHERE
contract_address = '0xfeacb05b373f1a08e68235ba7fc92636b92ced01' --router
AND topics [0] :: STRING = '0xe7d6f812e1a54298ddef0b881cd08a4d452d9de35eb18b5145aa580fdda18b26' --swap
AND tx_succeeded
{% if is_incremental() %}
-- incremental runs: 12h lookback overlap, hard-capped at 7 days of history
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
-- NOTE(review): both sender and tx_to are set to the user address —
-- presumably the MM router always pays out to the initiating user; verify
user_address AS sender,
user_address AS tx_to,
mm_address,
nonce,
mmTreasury,
baseToken,
quoteToken,
baseTokenAmount,
quoteTokenAmount,
token_in,
token_out,
token_in_amount AS amount_in_unadj,
token_out_amount AS amount_out_unadj,
'Swap' AS event_name,
'pancakeswap-v2' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base

View File

@ -1,81 +0,0 @@
# dbt schema tests for the PancakeSwap V2 market-maker swaps silver model.
version: 2
models:
- name: silver_dex__pancakeswap_v2_mm_swaps
tests:
# _LOG_ID (tx_hash-event_index) is the natural unique key for event rows
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
# sender may be null for very early history; only enforce after this date
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,79 +0,0 @@
-- Silver model: PancakeSwap StableSwap pool registry on BSC.
-- Decodes pool-creation events from two StableSwap factory contracts whose
-- event layouts differ: one factory indexes the token addresses in topics,
-- the other packs up to three coins plus the LP token into the data payload.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
-- split the hex payload into 32-byte words
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS pool_address,
-- token locations depend on which factory emitted the event
CASE
WHEN contract_address = '0x36bbb126e75351c0dfb651e39b38fe0bc436ffd2' THEN CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40))
WHEN contract_address = '0x25a55f9f2279a54951133d503490342b50e5cd15' THEN CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40))
END AS tokenA,
CASE
WHEN contract_address = '0x36bbb126e75351c0dfb651e39b38fe0bc436ffd2' THEN CONCAT('0x', SUBSTR(topics [3] :: STRING, 27, 40))
WHEN contract_address = '0x25a55f9f2279a54951133d503490342b50e5cd15' THEN CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40))
END AS tokenB,
-- only the second factory supports a third coin; NULL otherwise
CASE
WHEN contract_address = '0x36bbb126e75351c0dfb651e39b38fe0bc436ffd2' THEN NULL
WHEN contract_address = '0x25a55f9f2279a54951133d503490342b50e5cd15' THEN CONCAT('0x', SUBSTR(segmented_data [2] :: STRING, 25, 40))
END AS tokenC,
-- LP token address (second factory only)
CASE
WHEN contract_address = '0x36bbb126e75351c0dfb651e39b38fe0bc436ffd2' THEN NULL
WHEN contract_address = '0x25a55f9f2279a54951133d503490342b50e5cd15' THEN CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40))
END AS lp,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref ('core__fact_event_logs') }}
WHERE
contract_address IN (
'0x25a55f9f2279a54951133d503490342b50e5cd15',
'0x36bbb126e75351c0dfb651e39b38fe0bc436ffd2'
) --factory
AND topics [0] :: STRING IN (
'0xa9551fb056fc743efe2a0a34e39f9769ad10166520df7843c09a66f82e148b97',
'0x48dc7a1b156fe3e70ed5ed0afcb307661905edf536f15bb5786e327ea1933532'
) -- pool-creation events, one signature per factory (original comment said 'swap')
AND tx_succeeded
{% if is_incremental() %}
-- incremental runs: 12h lookback overlap, hard-capped at 7 days of history
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
pool_address,
tokenA,
tokenB,
tokenC,
lp,
_log_id,
_inserted_timestamp
FROM
-- dedupe: keep only the most recently ingested record per pool address
pools qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
# dbt schema tests for the PancakeSwap StableSwap pools silver model.
# TOKENC and LP are intentionally untested: they are NULL for pools from
# the two-coin factory.
version: 2
models:
- name: silver_dex__pancakeswap_v2_ss_pools
tests:
# pool_address is the model's incremental unique key — must stay unique
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKENA
tests:
- not_null
- name: TOKENB
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ

View File

@ -1,108 +0,0 @@
-- Silver model: PancakeSwap StableSwap TokenExchange (swap) events on BSC.
-- Joins to the curated StableSwap pool registry and maps the Curve-style
-- sold_id/bought_id coin indices back to token addresses.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
-- NOTE(review): only tokenA/tokenB are pulled in here; three-coin pools
-- (tokenC in the pools model) would be mislabeled by the bought_id CASE
-- below, which assumes a two-coin pool — confirm no 3-coin pools exist
SELECT
pool_address,
tokenA,
tokenB
FROM
{{ ref('silver_dex__pancakeswap_v2_ss_pools') }}
),
swaps_base AS (
SELECT
block_number,
origin_function_signature,
origin_from_address,
origin_to_address,
block_timestamp,
tx_hash,
event_index,
contract_address,
-- split the hex payload into 32-byte words: [sold_id, tokens_sold, bought_id, tokens_bought]
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS buyer_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS sold_id,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS tokens_sold,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS bought_id,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS tokens_bought,
tokens_sold AS amountIn,
tokens_bought AS amountOut,
-- coin index 0 maps to tokenA, otherwise tokenB (two-coin assumption)
CASE
WHEN bought_id = 0 THEN tokenA
ELSE tokenB
END AS tokenOut,
CASE
WHEN bought_id = 0 THEN tokenB
ELSE tokenA
END AS tokenIn,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
INNER JOIN pools p
ON p.pool_address = contract_address
WHERE
topics [0] :: STRING = '0xb2e76ae99761dc136e598d4a629bb347eccb9532a5f8bbd72e18467c3c34cc98' --TokenExchange
AND tx_succeeded
{% if is_incremental() %}
-- incremental runs: 12h lookback overlap, hard-capped at 7 days of history
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
contract_address,
-- buyer initiates and receives the exchange, so sender = tx_to here
buyer_address AS sender,
buyer_address AS tx_to,
sold_id,
tokens_sold,
bought_id,
tokens_bought,
amountIn AS amount_in_unadj,
amountOut AS amount_out_unadj,
tokenIn AS token_in,
tokenOut AS token_out,
'TokenExchange' AS event_name,
'pancakeswap-v2' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base

View File

@ -1,116 +0,0 @@
# dbt schema tests for the PancakeSwap StableSwap swaps silver model.
# NOTE(review): AMOUNT_IN, AMOUNT_OUT, SYMBOL_IN and SYMBOL_OUT are tested
# below but the silver model emits only the *_UNADJ columns — presumably
# copied from a gold-layer schema; verify.
version: 2
models:
- name: silver_dex__pancakeswap_v2_ss_swaps
tests:
# _LOG_ID (tx_hash-event_index) is the natural unique key for event rows
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
# freshness check: expect swaps within the last 2 days
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 2
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_IN_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SYMBOL_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: SYMBOL_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,80 +0,0 @@
-- Silver model: PancakeSwap V3 pool registry on BSC.
-- Decodes PoolCreated events from the V3 factory and keeps one row per
-- pool (latest ingested). Note this model, unlike its V2 siblings, uses a
-- MAX(_inserted_timestamp)::DATE watermark with no 7-day cap.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH created_pools AS (
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
-- split the hex payload into 32-byte words: [tickSpacing, pool_address]
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0_address,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1_address,
-- topic 3: fee tier, signed-hex decode ('s2c' = two's complement)
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
topics [3] :: STRING
)
) AS fee,
-- tick spacing is a signed int24 in the ABI, hence 's2c'
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
segmented_data [0] :: STRING
)
) AS tickSpacing,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0x0bfbcf9fa4f9c56b0f40a671ad40e0805a091865' --factory
AND topics [0] :: STRING = '0x783cca1c0412dd0d695e784568c96da2e9c22ff989357a2e8b1d9b2b4e6b7118' --PoolCreated
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
) :: DATE
FROM
{{ this }}
)
{% endif %}
),
FINAL AS (
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
token0_address,
token1_address,
fee :: INTEGER AS fee,
-- fee is in hundredths of a basis point; /10000 yields a percentage
(
fee / 10000
) :: FLOAT AS fee_percent,
tickSpacing AS tick_spacing,
pool_address,
_log_id,
_inserted_timestamp
FROM
created_pools
)
SELECT
*
FROM
-- dedupe: keep only the most recently ingested record per pool address
FINAL qualify(ROW_NUMBER() over(PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,31 +0,0 @@
# dbt schema tests for the PancakeSwap V3 pools silver model.
version: 2
models:
- name: silver_dex__pancakeswap_v3_pools
tests:
# pool_address is the model's incremental unique key — must stay unique
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: TX_HASH
tests:
- not_null
- name: FEE
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: POOL_ADDRESS
tests:
- not_null
- name: TICK_SPACING
tests:
- not_null

View File

@ -1,152 +0,0 @@
-- Silver model: PancakeSwap V3 swap events on BSC.
-- Joins V3 Swap logs to the curated V3 pool registry; amount0/amount1 are
-- signed deltas decoded with two's-complement ('s2c') hex conversion.
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0_address,
token1_address,
fee,
tick_spacing
FROM
{{ ref('silver_dex__pancakeswap_v3_pools') }}
),
base_swaps AS (
SELECT
l.block_number,
origin_function_signature,
origin_from_address,
origin_to_address,
l.block_timestamp,
l.tx_hash,
l.event_index,
l.contract_address,
-- split the hex payload into 32-byte words:
-- [amount0, amount1, sqrtPriceX96, liquidity, tick, protocolFeesToken0, protocolFeesToken1]
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS sender_address,
CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS recipient_address,
-- signed pool-balance delta of token0 (defaults to 0 if undecodable)
COALESCE(
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
segmented_data [0] :: STRING
)
),
0
) AS amount0,
-- signed pool-balance delta of token1
COALESCE(
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
segmented_data [1] :: STRING
)
),
0
) AS amount1,
COALESCE(
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
),
0
) AS sqrtPriceX96,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS liquidity,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
segmented_data [4] :: STRING
)
) AS tick,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
segmented_data [5] :: STRING
)
) AS protocolFeesToken0,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
segmented_data [6] :: STRING
)
) AS protocolFeesToken1,
-- NOTE(review): GREATEST picks the positive delta and LEAST the negative
-- one, yet token_in below is keyed off amount0 > 0 (positive = into the
-- pool). Under that convention the positive delta is the *input* amount,
-- so amountOut/amountIn here look swapped relative to token_in/token_out.
-- Downstream models may compensate — verify before changing.
ABS(GREATEST(amount0, amount1)) AS amountOut,
ABS(LEAST(amount0, amount1)) AS amountIn,
token0_address,
token1_address,
CASE
WHEN amount0 > 0 THEN token0_address
ELSE token1_address
END AS token_in,
CASE
WHEN amount1 < 0 THEN token1_address
ELSE token0_address
END AS token_out,
fee,
tick_spacing,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON l.contract_address = pool_address
WHERE
-- V3 launched on BSC after this date; prune the scan accordingly
block_timestamp :: DATE > '2023-02-01'
AND topics [0] :: STRING = '0x19b47279256b2a23a1665c810c8d55a1758940ee09377d4f8d26497a3577dc83' --swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(
_inserted_timestamp
) :: DATE
FROM
{{ this }}
)
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address AS pool_address,
sender_address,
recipient_address,
event_index,
amount0,
amount1,
sqrtPriceX96,
liquidity,
tick,
tick_spacing,
fee,
protocolFeesToken0,
protocolFeesToken1,
amountOut,
amountIn,
token0_address,
token1_address,
token_in,
token_out,
_log_id,
_inserted_timestamp
FROM
base_swaps

View File

@ -1,56 +0,0 @@
# dbt schema tests for the PancakeSwap V3 swaps silver model.
version: 2
models:
- name: silver_dex__pancakeswap_v3_swaps
tests:
# _LOG_ID (tx_hash-event_index) is the natural unique key for event rows
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: AMOUNT0
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: AMOUNT1
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: LIQUIDITY
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: EVENT_INDEX
tests:
- not_null
- name: POOL_ADDRESS
tests:
- not_null
- name: RECIPIENT_ADDRESS
tests:
- not_null
- name: SENDER_ADDRESS
tests:
- not_null
- name: TICK
tests:
- not_null
- name: TX_HASH
tests:
- not_null

View File

@ -1,44 +0,0 @@
# dbt schema tests for the consolidated DEX liquidity pools silver model,
# which unions per-protocol pool models into one curated table.
version: 2
models:
- name: silver_dex__complete_dex_liquidity_pools
tests:
# _ID is the cross-platform surrogate key — must stay unique
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: POOL_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: _ID
tests:
- not_null

Some files were not shown because too many files have changed in this diff Show More