AN-6265/pol-consolidate (#463)

* remove models

* package

* 2
drethereum 2025-07-22 09:55:52 -06:00 committed by GitHub
parent 25eb46d0e9
commit 8b024bdaa7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
119 changed files with 3 additions and 13687 deletions

View File

@@ -1,12 +0,0 @@
chain,chain_id
ethereum,1
bsc,2
tron,3
solana,4
polygon,5
arbitrum,6
stellar,7
avalanche,8
base,9
optimism,10
celo,11

View File

@@ -1,75 +0,0 @@
chain_name,chain_selector
apechain,14894068710063348487
arbitrum,4949039107694359620
l3x,3162193654116181371
treasure,1010349088906777999
areon,1939936305787790600
astar-zkevm,1540201334317828111
avalanche,6433500567565415381
dexalot,5463201557265485081
base,15971525489660198786
berachain,1294465214383781161
bsc,11344663589394136015
opbnb,465944652040885897
bitcichain,4874388048629246000
bitlayer,7937294810946806131
bittorrent,3776006016387883143
blast,4411394078118774322
bob,3849287863852499584
botanix,4560701533377838164
bsquared,5406759801798337480
celo,1346049177634351622
coinex_smart_chain,1761333065194157300
core,1224752112135636129
corn,9043146809313071210
cronos,1456215246176062136
cronos-zkevm,8788096068760390840
ethereum,5009297550715157269
fantom,3768048213127883732
filecoin,4561443241176882990
fraxtal,1462016016387883143
gnosis,465200170687744372
hashkey,7613811247471741961
hedera,3229138320728879060
immutable-zkevm,1237925231416731909
ink,3461204551265785888
kava,7550000543357438061
kroma,3719320017875267166
moonriver,1355020143337428062
lens,5608378062013572713
linea,4627098889531055414
mantle,1556008542357238666
merlin,241851231317828981
metis,8805746078405598895
mind,11690709103138290329
mode,7264351850409363825
morph,18164309074156128038
near,2039744413822257700
neonlink,8239338020728974000
optimism,3734403246176062136
plume,3208172210661564830
astar-polkadot,6422105447186081193
centrifuge,8175830712062617656
darwinia,8866418665544333000
moonbeam,1252863800116739621
polygon,4051577828743386545
polygon-zkevm,4348158687435793198
private-testnet-mica,4489326297382772450
ronin,6916147374840168594
rootstock,11964252391146578476
scroll,13204309965629103672
sei,9027416829622342829
shibarium,3993510008929295315
soneium,12505351618335765396
sonic,1673871237479749969
taiko,16468599424800719238
telos-evm,1477345371608778000
treasure,5214452172935136222
unichain,1923510103922296319
velas,374210358663784372
wemix,5142893604156789321
worldchain,2049429975587534727
xlayer,3016212468291539606
zircuit,17198166215261833993
zklink_nova,4350319965322101699
zksync,1562403441176082196

View File

@@ -1,41 +0,0 @@
chain,short_coin_type
Aptos,0x027d
Arbitrum,0x2329
Aurora,0x0a0a
Avalanche,0x2328
Base,0x2105
Bitcoin,0x0000
BNB Chain,0x02ca
Celo,0xce10
Coinweb,0x08ae
Conflux,0x01f7
Crypto Chain,0x018a
EOS,0x00c2
Ethereum,0x003c
Evmos,0x11bc
Fantom,0x03ef
Flow,0x021b
Gnosis,0x02bc
Harmony,0x03ff
Linea,0xe708
Manta,0x0263
Mantle,0x1388
Metis,0x0440
Moonbeam,0x0504
Moonriver,0x0505
Nautilus,0x56ce
Near,0x018d
opBNB,0x00cc
Optimism,0x0266
Polygon POS,0x03c6
Polygon zkEVM,0x044d
Scroll,0x2750
SKALE Europa,0x9296
SKALE Nebula,0xb4b1
Solana,0x01f5
Sui,0x0310
Terra,0x014a
Thorchain,0x03a3
Tron,0x00c3
zkSync Era,0x0324
zkFair,0xa70e

View File

@@ -1,98 +0,0 @@
destination_chain,standard_destination_chain
acala,acala
algorand,algorand
aptos,aptos
arbitrum,arbitrum
arbitrum nova,arbitrum nova
arbitrum one,arbitrum
archway,archway
astar,astar
aurora,aurora
aurora mainnet,aurora
avalanche,avalanche
avalanche c-chain,avalanche
base,base
bnb,bsc
bnb chain,bsc
bnb smart chain mainnet,bsc
boba bnb mainnet,boba
boba network,boba
bsc,bsc
canto,canto
carbon,carbon
celo,celo
celo mainnet,celo
coinweb,coinweb
conflux,conflux
conflux espace,conflux
crab network,crab
crescent,crescent
cronos mainnet,cronos
crypto chain,crypto
dfk chain,dfk
dogechain mainnet,dogechain
eos,eos
ethereum,ethereum
ethereum mainnet,ethereum
evmos,evmos
fantom,fantom
fantom opera,fantom
filecoin,filecoin
fuse,fuse
gnosis,gnosis
harmony mainnet shard 0,harmony
huobi eco chain mainnet,huobi eco
injective,injective
juno,juno
karura,karura
kava,kava
klaytn,klaytn
klaytn mainnet cypress,klaytn
kujira,kujira
linea,linea
manta,manta
mantle,mantle
metis,metis
metis andromeda mainnet,metis
moonbeam,moonbeam
moonriver,moonriver
nautilus,nautilus
near,near
neutron,neutron
oasis,oasis
okxchain mainnet,okxchain
ontology mainnet,ontology
op mainnet,optimism
opbnb,opbnb
optimism,optimism
osmosis,osmosis
polygon,polygon
polygon mainnet,polygon
polygon pos,polygon
polygon zkevm,polygon zkevm
ronin,ronin
scroll,scroll
secret-snip,secret
sei,sei
skale europa,skale europa
skale nebula,skale nebula
solana,solana
stargaze,stargaze
starknet,starknet
sui,sui
telos evm mainnet,telos
terra,terra
terra-2,terra2
terra2,terra2
tezos,tezos
tron,tron
umee,umee
waves,waves
xpla,xpla
xrpl,xrpl
zkfair,zkfair
zksync era,zksync era
zksync era mainnet,zksync era
zksync lite,zksync lite
zora,zora
zzz,zzz

View File

@@ -1,13 +0,0 @@
chain_name,chain_id
Arbitrum,110
Avalanche,106
Base,184
BNB,102
Ethereum,101
Fantom,112
Kava,177
Linea,183
Mantle,181
Metis,151
Optimism,111
Polygon,109

View File

@@ -1,37 +0,0 @@
chain_name,wormhole_chain_id
acala,12
algorand,8
aptos,22
arbitrum,23
aurora,9
avalanche,6
avalanche,43114
base,30
bsc,4
celo,14
cosmoshub,4000
ethereum,2
evmos,4001
fantom,10
gnosis,25
injective,19
karura,11
klaytn,13
kujira,4002
moonbeam,16
near,15
neon,17
oasis,7
optimism,24
osmosis,20
polygon,5
polygon,137
pythnet,26
rootstock,33
sei,32
sepolia,10002
solana,1
sui,21
terra,3
terra2,18
xpla,28

View File

@@ -106,6 +106,8 @@ models:
+enabled: false
bridge:
+enabled: true
dex:
+enabled: true
scores_package:
+enabled: true

View File

@@ -1,42 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta={
'database_tags':{
'table': {
'PROTOCOL': 'BALANCER, CURVE, DODO, FRAXSWAP, KYBERSWAP, QUICKSWAP, SUSHISWAP, UNISWAP',
'PURPOSE': 'DEX, LIQUIDITY, POOLS, LP, SWAPS',
}
}
},
tags = ['gold','defi','dex','curated']
) }}
SELECT
block_number AS creation_block,
block_timestamp AS creation_time,
tx_hash AS creation_tx,
platform,
contract_address AS factory_address,
pool_address,
pool_name,
tokens,
symbols,
decimals,
COALESCE (
complete_dex_liquidity_pools_id,
{{ dbt_utils.generate_surrogate_key(
['pool_address']
) }}
) AS dim_dex_liquidity_pools_id,
COALESCE(
inserted_timestamp,
'2000-01-01'
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
'2000-01-01'
) AS modified_timestamp
FROM
{{ ref('silver_dex__complete_dex_liquidity_pools') }}
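
A note on the id pattern above: the COALESCE around dbt_utils.generate_surrogate_key backfills an identifier for rows whose silver-layer id is null. A minimal sketch of roughly what the macro call expands to on Snowflake — the exact null sentinel varies by dbt_utils version, so treat this as an approximation:

COALESCE(
    complete_dex_liquidity_pools_id,
    -- approximate expansion of dbt_utils.generate_surrogate_key(['pool_address'])
    MD5(CAST(COALESCE(CAST(pool_address AS VARCHAR), '_dbt_utils_surrogate_key_null_') AS VARCHAR))
) AS dim_dex_liquidity_pools_id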

View File

@@ -1,63 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta ={
'database_tags':{
'table':{
'PROTOCOL': 'ACROSS, ALLBRIDGE, AXELAR, CELER, CBRIDGE, DLN, DEBRIDGE, EYWA, HOP, MESON, MULTICHAIN, STARGATE, SYMBIOSIS, SYNAPSE, WORMHOLE, CCIP',
'PURPOSE': 'BRIDGE'
} } },
tags = ['gold','defi','bridge','curated','ez']
) }}
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
sender,
receiver,
destination_chain_receiver,
COALESCE(
c.standardized_name,
b.destination_chain
) AS destination_chain,
destination_chain_id,
token_address,
token_symbol,
amount_unadj,
amount,
ROUND(
CASE
WHEN amount_usd < 1e+15 THEN amount_usd
ELSE NULL
END,
2
) AS amount_usd,
token_is_verified,
COALESCE (
complete_bridge_activity_id,
{{ dbt_utils.generate_surrogate_key(
['_id']
) }}
) AS ez_bridge_activity_id,
COALESCE(
inserted_timestamp,
'2000-01-01'
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
'2000-01-01'
) AS modified_timestamp
FROM
{{ ref('silver_bridge__complete_bridge_activity') }}
b
LEFT JOIN {{ ref('silver_bridge__standard_chain_seed') }} C
ON b.destination_chain = C.variation

View File

@@ -1,81 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta={
'database_tags':{
'table': {
'PROTOCOL': 'WOOFI, KYBERSWAP, DODO, QUICKSWAP, FRAX, UNISWAP, HASHFLOW, BALANCER, SUSHI, CURVE',
'PURPOSE': 'DEX, SWAPS'
}
}
},
tags = ['gold','defi','dex','curated','ez']
) }}
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
pool_name,
event_name,
amount_in_unadj,
amount_in,
ROUND(
CASE
WHEN token_in <> '0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270' or not token_in_is_verified
AND (
amount_out_usd IS NULL
OR ABS((amount_in_usd - amount_out_usd) / NULLIF(amount_out_usd, 0)) > 0.75
OR ABS((amount_in_usd - amount_out_usd) / NULLIF(amount_in_usd, 0)) > 0.75
) THEN NULL
ELSE amount_in_usd
END,
2
) AS amount_in_usd,
amount_out_unadj,
amount_out,
ROUND(
CASE
WHEN token_out <> '0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270' or not token_out_is_verified
AND (
amount_in_usd IS NULL
OR ABS((amount_out_usd - amount_in_usd) / NULLIF(amount_in_usd, 0)) > 0.75
OR ABS((amount_out_usd - amount_in_usd) / NULLIF(amount_out_usd, 0)) > 0.75
) THEN NULL
ELSE amount_out_usd
END,
2
) AS amount_out_usd,
sender,
tx_to,
event_index,
platform,
protocol,
version as protocol_version,
token_in,
token_in_is_verified,
token_out,
token_out_is_verified,
symbol_in,
symbol_out,
_log_id,
COALESCE (
complete_dex_swaps_id,
{{ dbt_utils.generate_surrogate_key(
['tx_hash','event_index']
) }}
) AS ez_dex_swaps_id,
COALESCE(
inserted_timestamp,
'2000-01-01'
) AS inserted_timestamp,
COALESCE(
modified_timestamp,
'2000-01-01'
) AS modified_timestamp
FROM {{ ref('silver_dex__complete_dex_swaps') }}
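
The ROUND/CASE guards above null out a swap's USD value when the two legs disagree by more than 75% in relative terms, a heuristic against bad token prices. A standalone sketch of the arithmetic with literal values (100 USD in vs. 20 USD out):

-- ABS((100 - 20) / NULLIF(20, 0)) = 4.0 > 0.75, so the value is discarded
SELECT
    CASE
        WHEN ABS((100 - 20) / NULLIF(20, 0)) > 0.75
            OR ABS((100 - 20) / NULLIF(100, 0)) > 0.75
        THEN NULL
        ELSE 100
    END AS amount_in_usd_checked;  -- returns NULL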

View File

@@ -1,100 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'across' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_log :"depositId" :: STRING
) AS depositId,
decoded_log :"depositor" :: STRING AS depositor,
TRY_TO_NUMBER(
decoded_log :"destinationChainId" :: STRING
) AS destinationChainId,
decoded_log :"message" :: STRING AS message,
TRY_TO_NUMBER(
decoded_log :"originChainId" :: STRING
) AS originChainId,
decoded_log :"originToken" :: STRING AS originToken,
TRY_TO_TIMESTAMP(
decoded_log :"quoteTimestamp" :: STRING
) AS quoteTimestamp,
decoded_log :"recipient" :: STRING AS recipient,
TRY_TO_NUMBER(
decoded_log :"relayerFeePct" :: STRING
) AS relayerFeePct,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0xafc4df6845a4ab948b492800d3d8a25d538a102a2bc07cd01f1cfa097fddcff6'
AND contract_address = '0x9295ee1d8c5b022be115a2ad3c30c72e34e7f096'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
name AS platform,
depositor AS sender,
recipient AS receiver,
recipient AS destination_chain_receiver,
destinationChainId AS destination_chain_id,
amount,
depositId AS deposit_id,
message,
originChainId AS origin_chain_id,
originToken AS token_address,
quoteTimestamp AS quote_timestamp,
relayerFeePct AS relayer_fee_pct,
_log_id,
_inserted_timestamp
FROM
base_evt
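
The is_incremental() block above is the standard dbt lookback: incremental runs only scan source rows newer than the target's high-water mark, minus a 12-hour buffer for late-arriving data, capped at 7 days. With incremental_strategy = 'delete+insert' and unique_key = "block_number", dbt then roughly executes the following against the warehouse (illustrative table names, not the adapter's exact SQL):

DELETE FROM analytics.silver_bridge__across_fundsdeposited
WHERE block_number IN (SELECT block_number FROM new_batch);

INSERT INTO analytics.silver_bridge__across_fundsdeposited
SELECT * FROM new_batch;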

View File

@@ -1,69 +0,0 @@
version: 2
models:
- name: silver_bridge__across_fundsdeposited
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
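
The regex expectations above assert that address-like columns contain a 0x-prefixed hex run; note the pattern is unanchored, so it matches anywhere in the string. A minimal hand-written equivalent on Snowflake (not the macro's exact compiled SQL, and the table name is illustrative):

-- rows that would fail the ORIGIN_FROM_ADDRESS expectation
SELECT origin_from_address
FROM silver_bridge__across_fundsdeposited
WHERE REGEXP_SUBSTR(origin_from_address, '0[xX][0-9a-fA-F]+') IS NULL;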

View File

@@ -1,133 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'across-v3' AS NAME,
event_index,
topic_0,
CASE
WHEN topic_0 = '0x32ed1a409ef04c7b0227189c3a103dc5ac10e775a15b785dcc510201f7c25ad3' THEN 'FundsDeposited'
WHEN topic_0 = '0xa123dc29aebf7d0c3322c8eeb5b999e859f39937950ed31056532713d0de396f' THEN 'V3FundsDeposited'
END AS event_name,
topics,
DATA,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
topics [1] :: STRING
)
) AS destinationChainId,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
topics [2] :: STRING
)
) AS depositId,
CONCAT('0x', SUBSTR(topics [3] :: STRING, 27, 40)) AS depositor,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS inputToken,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS outputToken,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS inputAmount,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS outputAmount,
TRY_TO_TIMESTAMP(
utils.udf_hex_to_int(
segmented_data [4] :: STRING
)
) AS quoteTimestamp,
TRY_TO_TIMESTAMP(
utils.udf_hex_to_int(
segmented_data [5] :: STRING
)
) AS fillDeadline,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [6] :: STRING
)
) AS exclusivityDeadline,
CONCAT('0x', SUBSTR(segmented_data [7] :: STRING, 25, 40)) AS recipient,
CONCAT('0x', SUBSTR(segmented_data [8] :: STRING, 25, 40)) AS exclusiveRelayer,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [9] :: STRING
)
) AS relayerFeePct,
segmented_data [10] :: STRING AS message,
event_removed,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topic_0 IN (
'0x32ed1a409ef04c7b0227189c3a103dc5ac10e775a15b785dcc510201f7c25ad3',
'0xa123dc29aebf7d0c3322c8eeb5b999e859f39937950ed31056532713d0de396f'
)
AND contract_address = '0x9295ee1d8c5b022be115a2ad3c30c72e34e7f096'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
contract_address AS bridge_address,
NAME AS platform,
depositor AS sender,
recipient AS receiver,
recipient AS destination_chain_receiver,
destinationChainId AS destination_chain_id,
inputAmount AS amount,
inputToken AS token_address,
depositId AS deposit_id,
message,
quoteTimestamp AS quote_timestamp,
relayerFeePct AS relayer_fee_pct,
exclusiveRelayer AS exclusive_relayer,
exclusivityDeadline AS exclusivity_deadline,
fillDeadline AS fill_deadline,
outputAmount AS output_amount,
outputToken AS output_token,
_log_id,
_inserted_timestamp
FROM
base_evt
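
The SUBSTR(..., 25, 40) calls above recover 20-byte addresses from 32-byte ABI words: each entry of segmented_data is 64 hex characters, an address is 40, and the 24 characters of zero-padding come first, so the address begins at character 25. A standalone check of that arithmetic:

SELECT CONCAT('0x', SUBSTR(
    '000000000000000000000000deadbeefdeadbeefdeadbeefdeadbeefdeadbeef',  -- 24 pad chars + 40 address chars
    25, 40
)) AS decoded_address;
-- returns 0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef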

View File

@@ -1,72 +0,0 @@
version: 2
models:
- name: silver_bridge__across_v3fundsdeposited
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- not_null

View File

@@ -1,153 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'allbridge' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
utils.udf_hex_to_string(
SUBSTRING(
decoded_log :"destination" :: STRING,
3
)
) AS destination_chain_symbol,
decoded_log :"lockId" :: STRING AS lockId,
decoded_log :"recipient" :: STRING AS recipient,
decoded_log :"sender" :: STRING AS sender,
utils.udf_hex_to_string(
SUBSTRING(
decoded_log :"tokenSource" :: STRING,
3
)
) AS token_source,
REGEXP_REPLACE(
decoded_log :"tokenSourceAddress" :: STRING,
'0+$',
''
) AS tokenSourceAddress,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x884a8def17f0d5bbb3fef53f3136b5320c9b39f75afb8985eeab9ea1153ee56d'
AND contract_address = '0xbbbd1bbb4f9b936c3604906d7592a644071de884'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
sender,
recipient AS receiver,
amount,
lockId AS lock_id,
CASE
WHEN destination_chain_symbol = 'AURO' THEN 'aurora mainnet'
WHEN destination_chain_symbol = 'AVA' THEN 'avalanche c-chain'
WHEN destination_chain_symbol = 'BSC' THEN 'bnb smart chain mainnet'
WHEN destination_chain_symbol = 'CELO' THEN 'celo mainnet'
WHEN destination_chain_symbol = 'ETH' THEN 'ethereum mainnet'
WHEN destination_chain_symbol = 'FTM' THEN 'fantom opera'
WHEN destination_chain_symbol = 'HECO' THEN 'huobi eco chain mainnet'
WHEN destination_chain_symbol = 'KLAY' THEN 'klaytn mainnet cypress'
WHEN destination_chain_symbol = 'POL' THEN 'polygon mainnet'
WHEN destination_chain_symbol = 'SOL' THEN 'solana'
WHEN destination_chain_symbol = 'TRA' THEN 'terra'
WHEN destination_chain_symbol = 'TEZ' THEN 'tezos'
WHEN destination_chain_symbol = 'WAVE' THEN 'waves'
ELSE LOWER(destination_chain_symbol)
END AS destination_chain,
CASE
WHEN token_source = 'AURO' THEN 'aurora mainnet'
WHEN token_source = 'AVA' THEN 'avalanche c-chain'
WHEN token_source = 'BSC' THEN 'bnb smart chain mainnet'
WHEN token_source = 'CELO' THEN 'celo mainnet'
WHEN token_source = 'ETH' THEN 'ethereum mainnet'
WHEN token_source = 'FTM' THEN 'fantom opera'
WHEN token_source = 'HECO' THEN 'huobi eco chain mainnet'
WHEN token_source = 'KLAY' THEN 'klaytn mainnet cypress'
WHEN token_source = 'POL' THEN 'polygon mainnet'
WHEN token_source = 'SOL' THEN 'solana'
WHEN token_source = 'TRA' THEN 'terra'
WHEN token_source = 'TEZ' THEN 'tezos'
WHEN token_source = 'WAVE' THEN 'waves'
ELSE LOWER(token_source)
END AS source_chain,
CASE
WHEN destination_chain = 'solana' THEN utils.udf_hex_to_base58(recipient)
WHEN destination_chain = 'waves' THEN utils.udf_hex_to_base58(SUBSTR(recipient,1,54))
WHEN destination_chain ILIKE 'terra%' THEN utils.udf_hex_to_bech32(recipient, SUBSTR(destination_chain, 1, 5))
WHEN destination_chain = 'tezos' THEN utils.udf_hex_to_tezos(CONCAT('0x', SUBSTR(recipient, 7, 40)), 'tz1')
WHEN destination_chain = 'near' THEN utils.udf_hex_to_string(SUBSTR(recipient,3))
WHEN destination_chain IN (
'aurora mainnet',
'avalanche c-chain',
'bnb smart chain mainnet',
'celo mainnet',
'fantom opera',
'fuse',
'huobi eco chain mainnet',
'klaytn mainnet cypress',
'polygon mainnet'
) THEN SUBSTR(
recipient,
1,
42
)
WHEN destination_chain = 'zzz' THEN origin_from_address
ELSE recipient
END AS destination_chain_receiver,
tokenSourceAddress AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt
WHERE
source_chain = 'polygon mainnet'

View File

@@ -1,69 +0,0 @@
version: 2
models:
- name: silver_bridge__allbridge_sent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@@ -1,157 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'allbridge' AS platform,
event_index,
'TokensSent' AS event_name,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [0] :: STRING)) AS amount,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [2] :: STRING)) AS destinationChainId,
origin_from_address AS sender,
origin_from_address AS recipient,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [4] :: STRING)) AS nonce,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [5] :: STRING)) AS messenger,
tx_succeeded,
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] = '0x9cd6008e8d4ebd34fd9d022278fec7f95d133780ecc1a0dea459fae3e9675390' --TokensSent
AND contract_address = '0x7775d63836987f444e2f14aa0fa2602204d7d3e0' --Allbridge
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
lp_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS sender,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS token,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [2] :: STRING)) AS amount,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [3] :: STRING)) AS vUsdAmount,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [4] :: STRING)) AS fee,
tx_succeeded,
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] = '0xa930da1d3f27a25892307dd59cec52dd9b881661a0f20364757f83a0da2f6873' --SwappedToVUsd
AND contract_address IN (
'0x0394c4f17738a10096510832beab89a9dd090791',
--USDT LP
'0x4c42dfdbb8ad654b42f66e0bd4dbdc71b52eb0a6',
--USDC LP
'0x58cc621c62b0aa9babfae5651202a932279437da' --USDC.e LP
)
AND tx_hash IN (
SELECT
tx_hash
FROM
base_evt
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
s.block_number,
s.block_timestamp,
s.tx_hash,
s.origin_function_signature,
s.origin_from_address,
s.origin_to_address,
s.contract_address AS bridge_address,
s.event_index,
s.event_name,
s.platform,
lp.amount,
lp.token AS token_address,
s.sender,
s.recipient AS receiver,
C.chain AS destination_chain,
s.destinationChainId AS destination_chain_id,
CASE
WHEN C.chain = 'solana' THEN utils.udf_hex_to_base58(CONCAT('0x', s.segmented_data [1] :: STRING))
WHEN C.chain = 'stellar' THEN s.segmented_data [1] :: STRING
ELSE CONCAT(
'0x',
SUBSTR(
s.segmented_data [1] :: STRING,
25,
40
)
)
END AS destination_chain_receiver,
CASE
WHEN C.chain = 'solana' THEN utils.udf_hex_to_base58(CONCAT('0x', s.segmented_data [3] :: STRING))
WHEN C.chain = 'stellar' THEN s.segmented_data [3] :: STRING
ELSE CONCAT(
'0x',
SUBSTR(
s.segmented_data [3] :: STRING,
25,
40
)
)
END AS destination_chain_token,
s.tx_succeeded,
s._log_id,
s._inserted_timestamp
FROM
base_evt s
INNER JOIN lp_evt lp
ON s.tx_hash = lp.tx_hash
AND s.block_number = lp.block_number
LEFT JOIN {{ ref('silver_bridge__allbridge_chain_id_seed') }} C
ON s.destinationChainId = C.chain_id qualify(ROW_NUMBER() over (PARTITION BY s._log_id
ORDER BY
s._inserted_timestamp DESC)) = 1
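
The QUALIFY clause above is Snowflake's shorthand for filtering on a window function: it keeps only the most recently inserted row per _log_id, deduplicating reprocessed events. The same pattern in isolation, over a hypothetical events table:

SELECT *
FROM events
QUALIFY ROW_NUMBER() OVER (
    PARTITION BY _log_id
    ORDER BY _inserted_timestamp DESC
) = 1;  -- latest version of each log wins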

View File

@@ -1,69 +0,0 @@
version: 2
models:
- name: silver_bridge__allbridge_tokens_sent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@@ -1,232 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'axelar' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
decoded_log :"destinationChain" :: STRING AS destinationChain,
LOWER(
decoded_log :"destinationContractAddress" :: STRING
) AS destinationContractAddress,
decoded_log :"payload" :: STRING AS payload,
origin_from_address AS recipient,
decoded_log :"payloadHash" :: STRING AS payloadHash,
decoded_log :"sender" :: STRING AS sender,
decoded_log :"symbol" :: STRING AS symbol,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x7e50569d26be643bda7757722291ec66b1be66d8283474ae3fab5a98f878a7a2'
AND contract_address = '0x6f015f16de9fc8791b234ef68d486d2bf203fba8'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
native_gas_paid AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'axelar' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
decoded_log :"destinationChain" :: STRING AS destinationChain,
LOWER(
decoded_log :"destinationAddress" :: STRING
) AS destinationAddress,
TRY_TO_NUMBER(
decoded_log :"gasFeeAmount" :: STRING
) AS gasFeeAmount,
decoded_log :"payloadHash" :: STRING AS payloadHash,
decoded_log :"refundAddress" :: STRING AS refundAddress,
decoded_log :"sourceAddress" :: STRING AS sourceAddress,
decoded_log :"symbol" :: STRING AS symbol,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x999d431b58761213cf53af96262b67a069cbd963499fd8effd1e21556217b841'
AND contract_address = '0x2d5d7d31f671f86c782533cc367f14109a082712'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
transfers AS (
SELECT
block_number,
tx_hash,
event_index,
contract_address AS token_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_token_transfers') }}
WHERE
from_address = '0xce16f69375520ab01377ce7b88f5ba8c48f8d666'
AND to_address IN (
'0x6f015f16de9fc8791b234ef68d486d2bf203fba8',
'0x0000000000000000000000000000000000000000'
)
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
FINAL AS (
SELECT
b.block_number,
b.block_timestamp,
b.origin_function_signature,
b.origin_from_address,
b.origin_to_address,
b.tx_hash,
b.event_index,
b.topic_0,
b.event_name,
b.event_removed,
b.tx_status,
b.contract_address AS bridge_address,
b.name AS platform,
b.origin_from_address AS sender,
CASE
WHEN b.recipient = '0x0000000000000000000000000000000000000000' THEN refundAddress
ELSE b.recipient
END AS receiver,
CASE
WHEN LOWER(
b.destinationChain
) = 'avalanche' THEN 'avalanche c-chain'
WHEN LOWER(
b.destinationChain
) = 'binance' THEN 'bnb smart chain mainnet'
WHEN LOWER(
b.destinationChain
) = 'celo' THEN 'celo mainnet'
WHEN LOWER(
b.destinationChain
) = 'ethereum' THEN 'ethereum mainnet'
WHEN LOWER(
b.destinationChain
) = 'fantom' THEN 'fantom opera'
WHEN LOWER(
b.destinationChain
) = 'polygon' THEN 'polygon mainnet'
ELSE LOWER(
b.destinationChain
)
END AS destination_chain,
b.destinationContractAddress AS destination_contract_address,
CASE
WHEN destination_chain IN (
'arbitrum',
'avalanche c-chain',
'base',
'bnb smart chain mainnet',
'celo mainnet',
'ethereum mainnet',
'fantom opera',
'filecoin',
'kava',
'linea',
'mantle',
'moonbeam',
'optimism',
'polygon mainnet',
'scroll'
) THEN receiver
ELSE destination_contract_address
END AS destination_chain_receiver,
b.amount,
b.payload,
b.payloadHash AS payload_hash,
b.symbol AS token_symbol,
t.token_address,
b._log_id,
b._inserted_timestamp
FROM
base_evt b
INNER JOIN transfers t
ON b.block_number = t.block_number
AND b.tx_hash = t.tx_hash
LEFT JOIN native_gas_paid n
ON n.block_number = b.block_number
AND n.tx_hash = b.tx_hash
)
SELECT
*
FROM
FINAL qualify (ROW_NUMBER() over (PARTITION BY _log_id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__axelar_contractcallwithtoken
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3

View File

@@ -1,146 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH on_ramp_set AS (
SELECT
block_timestamp,
tx_hash,
event_name,
TRY_TO_NUMBER(
decoded_log :destChainSelector :: STRING
) AS destChainSelector,
chain_name,
decoded_log :onRamp :: STRING AS onRampAddress,
modified_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
INNER JOIN {{ ref('silver_bridge__ccip_chain_seed') }}
ON destChainSelector = chain_selector
WHERE
contract_address = LOWER('0x849c5ED5a80F5B408Dd4969b78c2C8fdf0565Bfe') -- ccip router
AND topic_0 = '0x1f7d0ec248b80e5c0dde0ee531c4fc8fdb6ce9a2b3d90f560c74acd6a7202f23' -- onrampset
AND tx_succeeded
AND event_removed = FALSE
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(modified_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
ccip_sent AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
l.origin_function_signature,
l.origin_from_address,
l.origin_to_address,
contract_address,
l.event_name,
l.event_index,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT(
'0x',
segmented_data [13] :: STRING
) AS message_id,
l.decoded_log,
decoded_log :message :feeToken :: STRING AS fee_token,
TRY_TO_NUMBER(
decoded_log :message :feeTokenAmount :: STRING
) AS fee_token_amount,
TRY_TO_NUMBER(
decoded_log :message :gasLimit :: STRING
) AS gas_limit,
TRY_TO_NUMBER(
decoded_log :message :nonce :: STRING
) AS nonce,
decoded_log :message :receiver :: STRING AS receiver,
decoded_log :message :sender :: STRING AS sender,
TRY_TO_NUMBER(
decoded_log :message :sequenceNumber :: STRING
) AS sequence_number,
TRY_TO_NUMBER(
decoded_log :message :sourceChainSelector :: STRING
) AS source_chain_selector,
destChainSelector AS dest_chain_selector,
chain_name,
decoded_log :message :tokenAmounts AS token_amounts,
ARRAY_SIZE(
decoded_log :message :tokenAmounts
) AS token_amounts_count,
CONCAT(
l.tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
l.modified_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
l
INNER JOIN on_ramp_set
ON onRampAddress = contract_address
WHERE
topic_0 = '0xd0c3c799bf9e2639de44391e7f524d229b2b55f5b1ea94b2bf7da42f7243dddd' -- CCIPSendRequested
AND tx_succeeded
AND event_removed = FALSE
{% if is_incremental() %}
AND l.modified_timestamp >= (
SELECT
MAX(modified_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND l.modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
C.block_number,
C.block_timestamp,
C.origin_function_signature,
C.origin_from_address,
C.origin_to_address,
C.tx_hash,
C.event_name,
C.event_index,
'chainlink-ccip' AS platform,
'v1' AS version,
C.contract_address AS bridge_address,
C.message_id,
C.nonce,
C.receiver,
C.sender,
C.receiver AS destination_chain_receiver,
C.sequence_number,
C.source_chain_selector,
C.dest_chain_selector AS destination_chain_id,
C.chain_name AS destination_chain,
C.gas_limit,
C.fee_token,
-- Divide the fee evenly by the number of tokens in the array
C.fee_token_amount / C.token_amounts_count AS fee_token_amount_per_token,
C.token_amounts_count,
TRY_TO_NUMBER(
tokens.value :amount :: STRING
) AS amount_unadj,
tokens.value :token :: STRING AS token_address,
C._log_id,
C.modified_timestamp
FROM
ccip_sent C,
LATERAL FLATTEN(
input => C.token_amounts
) AS tokens
WHERE
token_amounts_count > 0
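
The LATERAL FLATTEN above explodes the tokenAmounts array into one row per bridged token, which is why the message-level fee is divided by token_amounts_count. A minimal sketch with an inline two-token array:

SELECT
    t.value :token :: STRING AS token_address,
    t.value :amount :: STRING AS amount_unadj,
    100 / ARRAY_SIZE(msg.token_amounts) AS fee_per_token  -- a fee of 100 split evenly
FROM (
    SELECT PARSE_JSON('[{"token":"0xaaa","amount":"1"},{"token":"0xbbb","amount":"2"}]') AS token_amounts
) msg,
LATERAL FLATTEN(input => msg.token_amounts) t;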

View File

@@ -1,70 +0,0 @@
version: 2
models:
- name: silver_bridge__ccip_send_requested
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: MODIFIED_TIMESTAMP
tests:
- not_null

View File

@@ -1,98 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'celer_cbridge' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_log :"dstChainId" :: STRING
) AS dstChainId,
TRY_TO_NUMBER(
decoded_log :"maxSlippage" :: STRING
) AS maxSlippage,
TRY_TO_NUMBER(
decoded_log :"nonce" :: STRING
) AS nonce,
decoded_log :"receiver" :: STRING AS receiver,
decoded_log :"sender" :: STRING AS sender,
decoded_log :"token" :: STRING AS token,
decoded_log :"transferId" :: STRING AS transferId,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x89d8051e597ab4178a863a5190407b98abfeff406aa8db90c59af76612e58f01'
AND contract_address IN (
'0x88dcdc47d2f83a99cf0000fdf667a468bb958a78',
'0xa251c4691c1ffd7d9b128874c023427513d8ac5c',
'0xb5df797468e6e8f2cb293cd6e32939366e0f8733',
'0x02745032d2aeccdc90310d6cca32cb82c7e149dd',
'0xf5c6825015280cdfd0b56903f9f8b5a2233476f5'
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
sender,
receiver,
receiver AS destination_chain_receiver,
amount,
dstChainId AS destination_chain_id,
maxSlippage AS max_slippage,
nonce,
token AS token_address,
transferId AS transfer_id,
_log_id,
_inserted_timestamp
FROM
base_evt

View File

@@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__celer_cbridge_send
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3

View File

@@ -1,125 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'dln_debridge' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
DATA,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(segmented_data [24] :: STRING, 1, 40)) AS token_address,
decoded_log :"affiliateFee" :: STRING AS affiliateFee,
decoded_log :"metadata" :: STRING AS metadata,
TRY_TO_NUMBER(
decoded_log :"nativeFixFee" :: STRING
) AS nativeFixFee,
decoded_log :"order" AS order_obj,
decoded_log :"order" :"allowedCancelBeneficiarySrc" :: STRING AS allowedCancelBeneficiarySrc,
decoded_log :"order" :"allowedTakerDst" :: STRING AS allowedTakerDst,
decoded_log :"order" :"externalCall" :: STRING AS externalCall,
TRY_TO_NUMBER(
decoded_log :"order" :"giveAmount" :: STRING
) AS giveAmount,
TRY_TO_NUMBER(
decoded_log :"order" :"giveChainId" :: STRING
) AS giveChainId,
decoded_log :"order" :"givePatchAuthoritySrc" :: STRING AS givePatchAuthoritySrc,
decoded_log :"order" :"giveTokenAddress" :: STRING AS giveTokenAddress,
TRY_TO_NUMBER(
decoded_log :"order" :"makerOrderNonce" :: STRING
) AS makerOrderNonce,
decoded_log :"order" :"makerSrc" :: STRING AS makerSrc,
decoded_log :"order" :"orderAuthorityAddressDst" :: STRING AS orderAuthorityAddressDst,
CONCAT('0x', LEFT(segmented_data [28] :: STRING, 40)) AS receiverDst,
TRY_TO_NUMBER(
decoded_log :"order" :"takeAmount" :: STRING
) AS takeAmount,
TRY_TO_NUMBER(
decoded_log :"order" :"takeChainId" :: STRING
) AS takeChainId,
decoded_log :"order" :"takeTokenAddress" :: STRING AS takeTokenAddress,
decoded_log :"orderId" :: STRING AS orderId,
TRY_TO_NUMBER(
decoded_log :"percentFee" :: STRING
) AS percentFee,
TRY_TO_NUMBER(
decoded_log :"referralCode" :: STRING
) AS referralCode,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0xfc8703fd57380f9dd234a89dce51333782d49c5902f307b02f03e014d18fe471' --CreatedOrder
AND contract_address = '0xef4fb24ad0916217251f553c0596f8edc630eb66' --Dln: Source
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
origin_from_address AS sender,
receiverDst AS receiver,
CASE
WHEN takeChainId :: STRING = '7565164' THEN utils.udf_hex_to_base58(CONCAT('0x', segmented_data [28] :: STRING))
ELSE receiverDst
END AS destination_chain_receiver,
giveAmount AS amount,
takeChainId AS destination_chain_id,
CASE
WHEN destination_chain_id :: STRING = '7565164' THEN 'solana'
ELSE NULL
END AS destination_chain,
CASE
WHEN token_address = '0x0000000000000000000000000000000000000000' THEN LOWER('0x0d500B1d8E8eF31E21C99d1Db9A6444d3ADf1270')
ELSE token_address
END AS token_address,
decoded_log,
order_obj,
_log_id,
_inserted_timestamp
FROM
base_evt

View File

@@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__dln_debridge_createdorder
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3

View File

@@ -1,144 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'eywa' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING IN (
'0x5566d73d091d945ab32ea023cd1930c0d43aa43bef9aee4cb029775cfc94bdae',
--RequestSent
'0xb5f411fa3c897c9b0b6cd61852278a67e73d885610724a5610a8580d3e94cfdb'
) --locked
AND contract_address IN (
'0xece9cf6a8f2768a3b8b65060925b646afeaa5167',
--BridgeV2
'0xac8f44ceca92b2a4b30360e5bd3043850a0ffcbe',
--PortalV2
'0xbf0b5d561b986809924f88099c4ff0e6bcce60c9' --PortalV2
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
requestsent AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
NAME,
event_index,
topic_0,
event_name,
decoded_log :"chainIdTo" :: STRING AS chainIdTo,
decoded_log :"data" :: STRING AS data_requestsent,
decoded_log :"requestId" :: STRING AS requestId,
decoded_log :"to" :: STRING AS to_address,
decoded_log,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
base_evt
WHERE
topic_0 = '0x5566d73d091d945ab32ea023cd1930c0d43aa43bef9aee4cb029775cfc94bdae' --RequestSent
),
locked AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
NAME,
event_index,
topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
decoded_log :"from" :: STRING AS from_address,
decoded_log :"to" :: STRING AS to_address,
decoded_log :"token" :: STRING AS token,
decoded_log,
event_removed,
tx_status,
_log_id,
_inserted_timestamp
FROM
base_evt
WHERE
topic_0 = '0xb5f411fa3c897c9b0b6cd61852278a67e73d885610724a5610a8580d3e94cfdb' --Locked
)
SELECT
r.block_number,
r.block_timestamp,
r.origin_function_signature,
r.origin_from_address,
r.origin_to_address,
r.tx_hash,
r.event_index,
r.topic_0,
r.event_name,
r.event_removed,
r.tx_status,
r.contract_address AS bridge_address,
r.name AS platform,
l.from_address AS sender,
sender AS receiver,
receiver AS destination_chain_receiver,
l.amount,
r.chainIdTo AS destination_chain_id,
l.token AS token_address,
r._log_id,
r._inserted_timestamp
FROM
requestsent r
LEFT JOIN locked l USING(
block_number,
tx_hash
)
WHERE token_address IS NOT NULL
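
-- Note on the alias chain in the SELECT above: Snowflake resolves lateral
-- column aliases within the same projection, so receiver takes the value of
-- sender and destination_chain_receiver takes the value of receiver. A
-- minimal demonstration of the mechanism:
SELECT
'0xabc' AS sender,
sender AS receiver, -- lateral alias reference
receiver AS destination_chain_receiver; -- all three columns are '0xabc'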


@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__eywa_requestsent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -1,120 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_contracts AS (
SELECT
contract_address,
MAX(block_number) AS block_number
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] :: STRING = '0xe35dddd4ea75d7e9b3fe93af4f4e40e778c3da4074c9d93e7c6536f1e803c1eb'
AND tx_succeeded
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND contract_address NOT IN (
SELECT
DISTINCT contract_address
FROM
{{ this }}
)
{% endif %}
GROUP BY
1
),
function_sigs AS (
SELECT
'0xe9cdfe51' AS function_sig,
'ammWrapper' AS function_name
),
inputs AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
0 AS function_input,
CONCAT(
function_sig,
LPAD(
function_input,
64,
0
)
) AS DATA
FROM
base_contracts
JOIN function_sigs
ON 1 = 1
),
contract_reads AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
function_input,
DATA,
utils.udf_json_rpc_call(
'eth_call',
[{ 'to': contract_address, 'from': null, 'data': data }, utils.udf_int_to_hex(block_number) ]
) AS rpc_request,
live.udf_api(
'POST',
CONCAT(
'{Service}',
'/',
'{Authentication}'
),{},
rpc_request,
'Vault/prod/polygon/quicknode/mainnet'
) AS read_output,
SYSDATE() AS _inserted_timestamp
FROM
inputs
),
reads_flat AS (
SELECT
read_output,
read_output :data :id :: STRING AS read_id,
read_output :data :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
function_sig,
function_name,
function_input,
DATA,
contract_address,
block_number,
_inserted_timestamp
FROM
contract_reads
)
SELECT
read_output,
read_id,
read_result,
read_id_object,
function_sig,
function_name,
function_input,
DATA,
block_number,
contract_address,
CONCAT('0x', SUBSTR(read_result, 27, 40)) AS amm_wrapper_address,
_inserted_timestamp
FROM
reads_flat
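
-- The read pipeline above leans on two ABI conventions: calldata is the
-- 4-byte selector followed by zero-padded 32-byte words, and an address
-- return value occupies the low 20 bytes of a 32-byte result word, i.e.
-- characters 27-66 of the '0x'-prefixed string. A self-contained sketch:
SELECT
CONCAT('0xe9cdfe51', LPAD('0', 64, '0')) AS calldata, -- ammWrapper() takes no arguments; the zero word is harmless padding kept for a uniform request shape
CONCAT(
'0x',
SUBSTR('0x000000000000000000000000c5102fe9359fd9a28f877a67e36b0f050d81a3cc', 27, 40)
) AS decoded_address;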


@ -1,16 +0,0 @@
version: 2
models:
- name: silver_bridge__hop_ammwrapper
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- CONTRACT_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: AMM_WRAPPER_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -1,119 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_contracts AS (
SELECT
contract_address,
amm_wrapper_address,
block_number
FROM
{{ ref('silver_bridge__hop_ammwrapper') }}
{% if is_incremental() %}
WHERE
amm_wrapper_address NOT IN (
SELECT
DISTINCT amm_wrapper_address
FROM
{{ this }}
)
{% endif %}
),
function_sigs AS (
SELECT
'0x1ee1bf67' AS function_sig,
'l2CanonicalToken' AS function_name
),
inputs AS (
SELECT
amm_wrapper_address,
block_number,
function_sig,
function_name,
0 AS function_input,
CONCAT(
function_sig,
LPAD(
function_input,
64,
0
)
) AS DATA
FROM
base_contracts
JOIN function_sigs
ON 1 = 1
),
contract_reads AS (
SELECT
amm_wrapper_address,
block_number,
function_sig,
function_name,
function_input,
DATA,
utils.udf_json_rpc_call(
'eth_call',
[{ 'to': amm_wrapper_address, 'from': null, 'data': data }, utils.udf_int_to_hex(block_number) ]
) AS rpc_request,
live.udf_api(
'POST',
CONCAT(
'{Service}',
'/',
'{Authentication}'
),{},
rpc_request,
'Vault/prod/polygon/quicknode/mainnet'
) AS read_output,
SYSDATE() AS _inserted_timestamp
FROM
inputs
),
reads_flat AS (
SELECT
read_output,
read_output :data :id :: STRING AS read_id,
read_output :data :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
function_sig,
function_name,
function_input,
DATA,
amm_wrapper_address,
block_number,
_inserted_timestamp
FROM
contract_reads
)
SELECT
read_output,
read_id,
read_result,
read_id_object,
function_sig,
function_name,
function_input,
DATA,
reads_flat.block_number,
contract_address,
amm_wrapper_address,
CASE
WHEN contract_address = '0x58c61aee5ed3d748a1467085ed2650b697a66234' THEN '0xc5102fe9359fd9a28f877a67e36b0f050d81a3cc'
ELSE CONCAT('0x', SUBSTR(read_result, 27, 40))
END AS token_address,
_inserted_timestamp
FROM
reads_flat
LEFT JOIN base_contracts USING(amm_wrapper_address)
WHERE
token_address <> '0x'
AND token_address IS NOT NULL


@ -1,16 +0,0 @@
version: 2
models:
- name: silver_bridge__hop_l2canonicaltoken
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- CONTRACT_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -1,114 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'hop' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_log :"amountOutMin" :: STRING
) AS amountOutMin,
TRY_TO_NUMBER(
decoded_log :"bonderFee" :: STRING
) AS bonderFee,
TRY_TO_NUMBER(
decoded_log :"chainId" :: STRING
) AS chainId,
TRY_TO_TIMESTAMP(
decoded_log :"deadline" :: STRING
) AS deadline,
TRY_TO_NUMBER(
decoded_log :"index" :: STRING
) AS INDEX,
decoded_log :"recipient" :: STRING AS recipient,
decoded_log :"transferId" :: STRING AS transferId,
decoded_log :"transferNonce" :: STRING AS transferNonce,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0xe35dddd4ea75d7e9b3fe93af4f4e40e778c3da4074c9d93e7c6536f1e803c1eb'
AND origin_to_address IS NOT NULL
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
hop_tokens AS (
SELECT
block_number,
contract_address,
amm_wrapper_address,
token_address,
_inserted_timestamp
FROM
{{ ref('silver_bridge__hop_l2canonicaltoken') }}
)
SELECT
b.block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
amm_wrapper_address,
NAME AS platform,
origin_from_address AS sender,
recipient AS receiver,
receiver AS destination_chain_receiver,
chainId AS destination_chain_id,
token_address,
amount,
amountOutMin AS amount_out_min,
bonderFee AS bonder_fee,
deadline,
INDEX,
transferId AS transfer_id,
transferNonce AS transfer_nonce,
_log_id,
b._inserted_timestamp
FROM
base_evt b
LEFT JOIN hop_tokens h USING(contract_address)
WHERE
token_address IS NOT NULL


@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__hop_transfersent
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -1,181 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH token_transfers AS (
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
contract_address,
from_address,
to_address,
raw_amount,
CONCAT(tx_hash :: STRING, '-', event_index :: STRING) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_token_transfers') }}
WHERE
from_address <> '0x0000000000000000000000000000000000000000'
AND to_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
native_transfers AS (
SELECT
et.block_number,
et.block_timestamp,
et.tx_hash,
tx.from_address AS origin_from_address,
tx.to_address AS origin_to_address,
tx.origin_function_signature,
et.from_address,
et.to_address,
amount_precise_raw,
et.ez_native_transfers_id AS _call_id,
et.modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_native_transfers') }}
et
INNER JOIN {{ ref('core__fact_transactions') }}
tx
ON et.block_number = tx.block_number
AND et.tx_hash = tx.tx_hash
WHERE
et.to_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
all_transfers AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
'Transfer' AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
raw_amount AS amount_unadj,
contract_address AS token_address,
{{ dbt_utils.generate_surrogate_key(
['_log_id']
) }} AS _id,
_inserted_timestamp
FROM
token_transfers
UNION ALL
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
NULL AS event_index,
NULL AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
amount_precise_raw AS amount_unadj,
'0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270' AS token_address,
{{ dbt_utils.generate_surrogate_key(
['_call_id']
) }} AS _id,
_inserted_timestamp
FROM
native_transfers
),
dst_info AS (
SELECT
block_number,
tx_hash,
topics [1] :: STRING AS encoded_data,
SUBSTR(RIGHT(encoded_data, 12), 1, 4) AS destination_chain_id,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
AND topics [0] :: STRING = '0x5ce4019f772fda6cb703b26bce3ec3006eb36b73f1d3a0eb441213317d9f5e9d'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '16 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
t.block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
t.tx_hash,
event_index,
event_name,
'meson' AS platform,
bridge_address,
sender,
receiver,
CASE
WHEN origin_from_address = '0x0000000000000000000000000000000000000000' THEN sender
ELSE origin_from_address
END AS destination_chain_receiver,
amount_unadj,
destination_chain_id,
COALESCE(LOWER(chain),'other') AS destination_chain,
token_address,
_id,
t._inserted_timestamp
FROM
all_transfers t
INNER JOIN dst_info d
ON t.tx_hash = d.tx_hash
AND t.block_number = d.block_number
LEFT JOIN {{ ref('silver_bridge__meson_chain_id_seed') }}
s
ON d.destination_chain_id :: STRING = RIGHT(
s.short_coin_type,
4
) :: STRING
WHERE
origin_to_address IS NOT NULL
qualify ROW_NUMBER() over (
PARTITION BY _id
ORDER BY
t._inserted_timestamp DESC
) = 1
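
-- Meson packs routing data into topics [1]; the model keeps two bytes of it
-- as the chain id: RIGHT(encoded_data, 12) takes the last 12 hex characters
-- and SUBSTR(..., 1, 4) keeps the first four of those, which are then matched
-- against the seed's short_coin_type. The string mechanics on a made-up word:
SELECT
SUBSTR(RIGHT('0x' || REPEAT('0', 52) || '003c00000001', 12), 1, 4) AS destination_chain_id; -- '003c'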


@ -1,52 +0,0 @@
version: 2
models:
- name: silver_bridge__meson_transfers
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_ADDRESS
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -1,85 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'multichain' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
decoded_log :"from" :: STRING AS from_address,
decoded_log :"receiver" :: STRING AS receiver,
decoded_log :"swapoutID" :: STRING AS swapoutID,
TRY_TO_NUMBER(
decoded_log :"toChainID" :: STRING
) AS toChainID,
decoded_log :"token" :: STRING AS token,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x0d969ae475ff6fcaf0dcfa760d4d8607244e8d95e9bf426f8d5d69f9a3e525af'
AND contract_address = '0x1633d66ca91ce4d81f63ea047b7b19beb92df7f3'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
LOWER(from_address) AS sender,
LOWER(receiver) AS receiver,
LOWER(receiver) AS destination_chain_receiver,
amount,
toChainID AS destination_chain_id,
token AS token_address,
swapoutID AS swapout_id,
_log_id,
_inserted_timestamp
FROM
base_evt
WHERE destination_chain_id <> 0


@ -1,69 +0,0 @@
version: 2
models:
- name: silver_bridge__multichain_v7_loganyswapout
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -1,81 +0,0 @@
version: 2
models:
- name: silver_bridge__complete_bridge_activity
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null:
where: VERSION <> 'v1-native' AND PLATFORM NOT IN ('wormhole','meson')
- name: EVENT_NAME
tests:
- not_null:
where: VERSION <> 'v1-native' AND PLATFORM NOT IN ('wormhole','meson')
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: PLATFORM
tests:
- not_null
- name: VERSION
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -1,134 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_contracts AS (
SELECT
tx_hash,
block_number,
block_timestamp,
from_address,
to_address AS contract_address,
concat_ws(
'-',
block_number,
tx_position,
CONCAT(
TYPE,
'_',
trace_address
)
) AS _call_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_traces') }}
WHERE
from_address = LOWER('0x808d7c71ad2ba3FA531b068a2417C63106BC0949')
AND TYPE ILIKE 'create%'
AND trace_succeeded
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND to_address NOT IN (
SELECT
DISTINCT pool_address
FROM
{{ this }}
)
{% endif %}
),
function_sigs AS (
SELECT
'0xfc0c546a' AS function_sig,
'token' AS function_name
),
inputs AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
0 AS function_input,
CONCAT(
function_sig,
LPAD(
function_input,
64,
0
)
) AS DATA
FROM
base_contracts
JOIN function_sigs
ON 1 = 1
),
contract_reads AS (
SELECT
contract_address,
block_number,
function_sig,
function_name,
function_input,
DATA,
utils.udf_json_rpc_call(
'eth_call',
[{ 'to': contract_address, 'from': null, 'data': data }, utils.udf_int_to_hex(block_number) ]
) AS rpc_request,
live.udf_api(
'POST',
CONCAT(
'{Service}',
'/',
'{Authentication}'
),{},
rpc_request,
'Vault/prod/polygon/quicknode/mainnet'
) AS read_output,
SYSDATE() AS _inserted_timestamp
FROM
inputs
),
reads_flat AS (
SELECT
read_output,
read_output :data :id :: STRING AS read_id,
read_output :data :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
function_sig,
function_name,
function_input,
DATA,
contract_address,
block_number,
_inserted_timestamp
FROM
contract_reads
)
SELECT
read_output,
read_id,
read_result,
read_id_object,
function_sig,
function_name,
function_input,
DATA,
block_number,
contract_address AS pool_address,
CONCAT('0x', SUBSTR(read_result, 27, 40)) AS token_address,
_inserted_timestamp
FROM
reads_flat


@ -1,16 +0,0 @@
version: 2
models:
- name: silver_bridge__stargate_createpool
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -1,118 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH pools AS (
SELECT
pool_address,
LOWER(token_address) AS token_address
FROM
{{ ref('silver_bridge__stargate_createpool') }}
),
base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'stargate' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amountSD" :: STRING
) AS amountSD,
TRY_TO_NUMBER(
decoded_log :"chainId" :: STRING
) AS chainId,
CASE
WHEN chainId < 100 THEN chainId + 100
ELSE chainId
END AS destination_chain_id,
TRY_TO_NUMBER(
decoded_log :"dstPoolId" :: STRING
) AS dstPoolId,
TRY_TO_NUMBER(
decoded_log :"eqFee" :: STRING
) AS eqFee,
TRY_TO_NUMBER(
decoded_log :"eqReward" :: STRING
) AS eqReward,
TRY_TO_NUMBER(
decoded_log :"lpFee" :: STRING
) AS lpFee,
TRY_TO_NUMBER(
decoded_log :"protocolFee" :: STRING
) AS protocolFee,
decoded_log :"from" :: STRING AS from_address,
decoded_log,
token_address,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
d
INNER JOIN pools p
ON d.contract_address = p.pool_address
WHERE
topics [0] :: STRING = '0x34660fc8af304464529f48a778e03d03e4d34bcd5f9b6f0cfbf3cd238c642f7f'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
LOWER('0x9d1B1669c73b033DFe47ae5a0164Ab96df25B944') AS bridge_address,
NAME AS platform,
from_address AS sender,
from_address AS receiver,
receiver AS destination_chain_receiver,
amountSD AS amount_unadj,
destination_chain_id,
LOWER(chain_name) AS destination_chain,
dstPoolId AS destination_pool_id,
eqFee AS fee,
eqReward AS reward,
lpFee AS lp_fee,
protocolFee AS protocol_fee,
token_address,
_log_id,
_inserted_timestamp
FROM
base_evt b
LEFT JOIN {{ ref('silver_bridge__stargate_chain_id_seed') }}
s
ON b.destination_chain_id :: STRING = s.chain_id :: STRING
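
-- The chainId shift in base_evt appears to track LayerZero's endpoint-id
-- migration, where mainnet ids moved into the 100+ range (an assumption
-- inferred from the seed join, not stated in the model). The normalization
-- itself is trivial:
SELECT
chain_id,
IFF(chain_id < 100, chain_id + 100, chain_id) AS destination_chain_id
FROM (SELECT 1 AS chain_id UNION ALL SELECT 109 AS chain_id);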


@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__stargate_swap
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -1,89 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'symbiosis' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_log :"chainID" :: STRING
) AS chainID,
decoded_log :"from" :: STRING AS from_address,
decoded_log :"id" :: STRING AS id,
decoded_log :"revertableAddress" :: STRING AS revertableAddress,
decoded_log :"to" :: STRING AS to_address,
decoded_log :"token" :: STRING AS token,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x31325fe0a1a2e6a5b1e41572156ba5b4e94f0fae7e7f63ec21e9b5ce1e4b3eab'
AND contract_address IN (
'0xb8f275fbf7a959f4bce59999a2ef122a099e81a8',
'0x3338be49a5f60e2593337919f9ad7098e9a7dd7e'
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
amount,
chainID AS destination_chain_id,
id,
revertableAddress AS revertable_address,
token AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt


@ -1,74 +0,0 @@
version: 2
models:
- name: silver_bridge__symbiosis_synthesizerequest
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -1,92 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'synapse' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_log :"chainId" :: STRING
) AS chainId,
decoded_log :"to" :: STRING AS to_address,
decoded_log :"token" :: STRING AS token,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING IN (
'0xdc5bad4651c5fbe9977a696aadc65996c468cde1448dd468ec0d83bf61c4b57c',
--redeem
'0xda5273705dbef4bf1b902a131c2eac086b7e1476a8ab0cb4da08af1fe1bd8e3b' --deposit
)
AND contract_address IN (
'0x8f5bbb2bb8c2ee94639e55d5f41de9b4839c1280',
'0x2119a5c9279a13ec0de5e30d572b316f1cfca567',
'0x0efc29e196da2e81afe96edd041bedcdf9e74893',
'0x5f06745ee8a2001198a379bafbd0361475f3cfc3',
'0x7103a324f423b8a4d4cc1c4f2d5b374af4f0bab5'
)
AND origin_to_address IS NOT NULL
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
amount,
origin_from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
chainId AS destination_chain_id,
token AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt


@ -1,69 +0,0 @@
version: 2
models:
- name: silver_bridge__synapse_token_bridge
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -1,100 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'synapse' AS NAME,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount,
TRY_TO_NUMBER(
decoded_log :"chainId" :: STRING
) AS chainId,
TRY_TO_TIMESTAMP(
decoded_log :"deadline" :: STRING
) AS deadline,
TRY_TO_NUMBER(
decoded_log :"minDy" :: STRING
) AS minDy,
decoded_log :"to" :: STRING AS to_address,
decoded_log :"token" :: STRING AS token,
TRY_TO_NUMBER(
decoded_log :"tokenIndexFrom" :: STRING
) AS tokenIndexFrom,
TRY_TO_NUMBER(
decoded_log :"tokenIndexTo" :: STRING
) AS tokenIndexTo,
decoded_log,
event_removed,
IFF(tx_succeeded,'SUCCESS','FAIL') AS tx_status,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x91f25e9be0134ec851830e0e76dc71e06f9dade75a9b84e9524071dbbc319425'
AND contract_address IN (
'0x8f5bbb2bb8c2ee94639e55d5f41de9b4839c1280',
'0x0efc29e196da2e81afe96edd041bedcdf9e74893'
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
event_removed,
tx_status,
contract_address AS bridge_address,
NAME AS platform,
origin_from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
amount,
chainId AS destination_chain_id,
token AS token_address,
deadline,
minDy AS min_dy,
tokenIndexFrom AS token_index_from,
tokenIndexTo AS token_index_to,
_log_id,
_inserted_timestamp
FROM
base_evt


@ -1,69 +0,0 @@
version: 2
models:
- name: silver_bridge__synapse_tokenbridgeandswap
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+


@ -1,243 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH token_transfers AS (
SELECT
tr.block_number,
tr.block_timestamp,
tr.origin_function_signature,
tr.origin_from_address,
tr.origin_to_address,
tr.tx_hash,
event_index,
tr.contract_address,
tr.from_address,
tr.to_address,
raw_amount,
regexp_substr_all(SUBSTR(input_data, 11, len(input_data)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS destination_chain_id,
CONCAT(
'0x',
segmented_data [3] :: STRING
) AS recipient1,
CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS recipient2,
LENGTH(
REGEXP_SUBSTR(
segmented_data [3] :: STRING,
'^(0*)'
)
) AS len,
CASE
WHEN len >= 24 THEN recipient2
ELSE recipient1
END AS destination_recipient_address,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS token,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [1] :: STRING)) AS amount,
utils.udf_hex_to_int(
segmented_data [4] :: STRING
) AS arbiterFee,
utils.udf_hex_to_int(
segmented_data [5] :: STRING
) AS nonce,
CONCAT(tr.tx_hash :: STRING, '-', tr.event_index :: STRING) AS _log_id,
tr.modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_token_transfers') }}
tr
INNER JOIN {{ ref('core__fact_transactions') }}
tx
ON tr.block_number = tx.block_number
AND tr.tx_hash = tx.tx_hash
WHERE
tr.from_address <> '0x0000000000000000000000000000000000000000'
AND tr.to_address = LOWER('0x5a58505a96D1dbf8dF91cB21B54419FC36e93fdE')
AND tr.origin_function_signature = '0x0f5287b0' -- tokenTransfer
AND destination_chain_id <> 0
{% if is_incremental() %}
AND tr.modified_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
native_transfers AS (
SELECT
et.block_number,
et.block_timestamp,
et.tx_hash,
tx.from_address AS origin_from_address,
tx.to_address AS origin_to_address,
tx.origin_function_signature,
et.from_address,
et.to_address,
amount_precise_raw,
regexp_substr_all(SUBSTR(input_data, 11, len(input_data)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS destination_chain_id,
CONCAT(
'0x',
segmented_data [1] :: STRING
) AS recipient1,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS recipient2,
LENGTH(
REGEXP_SUBSTR(
segmented_data [1] :: STRING,
'^(0*)'
)
) AS len,
CASE
WHEN len >= 24 THEN recipient2
ELSE recipient1
END AS destination_recipient_address,
utils.udf_hex_to_int(
segmented_data [2] :: STRING
) AS arbiterFee,
utils.udf_hex_to_int(
segmented_data [3] :: STRING
) AS nonce,
et.ez_native_transfers_id AS _call_id,
et.modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_native_transfers') }}
et
INNER JOIN {{ ref('core__fact_transactions') }}
tx
ON et.block_number = tx.block_number
AND et.tx_hash = tx.tx_hash
WHERE
et.to_address = LOWER('0x5a58505a96D1dbf8dF91cB21B54419FC36e93fdE')
AND tx.origin_function_signature = '0x9981509f' -- wrapAndTransfer
AND destination_chain_id <> 0
{% if is_incremental() %}
AND et.modified_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
all_transfers AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
'Transfer' AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
raw_amount AS amount_unadj,
destination_chain_id,
contract_address AS token_address,
destination_recipient_address,
{{ dbt_utils.generate_surrogate_key(
['_log_id']
) }} AS _id,
_inserted_timestamp
FROM
token_transfers
UNION ALL
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
NULL AS event_index,
NULL AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
amount_precise_raw AS amount_unadj,
destination_chain_id,
'0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270' AS token_address,
destination_recipient_address,
{{ dbt_utils.generate_surrogate_key(
['_call_id']
) }} AS _id,
_inserted_timestamp
FROM
native_transfers
),
base_near AS (
SELECT
near_address,
addr_encoded
FROM
{{ source(
'crosschain_silver',
'near_address_encoded'
) }}
)
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
event_name,
'wormhole' AS platform,
bridge_address,
sender,
receiver,
amount_unadj,
destination_chain_id,
chain_name AS destination_chain,
token_address,
destination_recipient_address,
--hex address on the destination chain, requires decoding for non-EVM - more info: https://docs.wormhole.com/wormhole/blockchain-environments/environments
CASE
WHEN destination_chain = 'solana' THEN utils.udf_hex_to_base58(destination_recipient_address)
WHEN destination_chain IN ('injective','sei')
THEN utils.udf_hex_to_bech32(destination_recipient_address,SUBSTR(destination_chain,1,3))
WHEN destination_chain IN ('osmosis','xpla')
THEN utils.udf_hex_to_bech32(destination_recipient_address,SUBSTR(destination_chain,1,4))
WHEN destination_chain IN ('terra','terra2','evmos')
THEN utils.udf_hex_to_bech32(destination_recipient_address,SUBSTR(destination_chain,1,5))
WHEN destination_chain IN ('cosmoshub','kujira')
THEN utils.udf_hex_to_bech32(destination_recipient_address,SUBSTR(destination_chain,1,6))
WHEN destination_chain IN ('near')
THEN near_address
WHEN destination_chain IN ('algorand')
THEN utils.udf_hex_to_algorand(destination_recipient_address)
WHEN destination_chain IN ('polygon')
THEN SUBSTR(destination_recipient_address,1,42)
ELSE destination_recipient_address
END AS destination_chain_receiver,
_id,
_inserted_timestamp
FROM
all_transfers t
LEFT JOIN {{ ref('silver_bridge__wormhole_chain_id_seed') }}
s
ON t.destination_chain_id :: STRING = s.wormhole_chain_id :: STRING
LEFT JOIN base_near n
ON t.destination_recipient_address = n.addr_encoded
WHERE
origin_to_address IS NOT NULL
qualify ROW_NUMBER() over (
PARTITION BY _id
ORDER BY
_inserted_timestamp DESC
) = 1
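
-- Wormhole emits 32-byte universal receiver addresses; the CASE above picks
-- a decoder per destination VM using the repo's helpers. A minimal sketch
-- (outputs depend on the UDF implementations):
SELECT
utils.udf_hex_to_base58('0x' || REPEAT('11', 32)) AS solana_receiver,
utils.udf_hex_to_bech32('0x' || REPEAT('11', 32), 'sei') AS sei_receiver,
SUBSTR('0x1111111111111111111111111111111111111111', 1, 42) AS evm_receiver; -- identity for an already-20-byte address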


@ -1,52 +0,0 @@
version: 2
models:
- name: silver_bridge__wormhole_transfers
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_ADDRESS
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3


@ -1,261 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
full_refresh = false,
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools_registered AS (
SELECT
block_number,
block_timestamp,
event_index,
tx_hash,
contract_address,
topics [1] :: STRING AS pool_id,
SUBSTR(
topics [1] :: STRING,
1,
42
) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp,
ROW_NUMBER() over (
ORDER BY
pool_address
) AS row_num
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] :: STRING = '0x3c13bc30b8e878c53fd2a36b679409c073afd75950be43d8858768e956fbc20e' --PoolRegistered
AND contract_address = '0xba12222222228d8ba445958a75a0704d566bf2c8'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
tokens_registered AS (
SELECT
block_number,
block_timestamp,
event_index,
tx_hash,
contract_address,
decoded_log :poolId :: STRING AS pool_id,
decoded_log :tokens AS tokens,
tokens [0] :: STRING AS token0,
tokens [1] :: STRING AS token1,
tokens [2] :: STRING AS token2,
tokens [3] :: STRING AS token3,
tokens [4] :: STRING AS token4,
tokens [5] :: STRING AS token5,
tokens [6] :: STRING AS token6,
tokens [7] :: STRING AS token7,
decoded_log :assetManagers AS asset_managers,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0xf5847d3f2197b16cdcd2098ec95d0905cd1abdaf415f07bb7cef2bba8ac5dec4' --TokensRegistered
AND contract_address = '0xba12222222228d8ba445958a75a0704d566bf2c8'
AND tx_hash IN (
SELECT
tx_hash
FROM
pools_registered
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
function_sigs AS (
SELECT
'0x06fdde03' AS function_sig,
'name' AS function_name
UNION ALL
SELECT
'0x95d89b41' AS function_sig,
'symbol' AS function_name
UNION ALL
SELECT
'0x313ce567' AS function_sig,
'decimals' AS function_name
),
inputs_pools AS (
SELECT
pool_address,
block_number,
function_sig
FROM
pools_registered
JOIN function_sigs
ON 1 = 1
),
build_rpc_requests AS (
SELECT
pool_address,
inputs_pools.block_number,
function_sig,
RPAD(
function_sig,
64,
'0'
) AS input,
utils.udf_json_rpc_call(
'eth_call',
[{'to': pool_address, 'from': null, 'data': input}, utils.udf_int_to_hex(block_number)],
concat_ws(
'-',
pool_address,
input,
block_number
)
) AS rpc_request,
row_num,
CEIL(
row_num / 250
) AS batch_no
FROM
inputs_pools
LEFT JOIN pools_registered USING(pool_address)
),
pool_token_reads AS ({% for item in range(10) %}
(
SELECT
live.udf_api('POST', CONCAT('{Service}', '/', '{Authentication}'),{}, batch_rpc_request, 'Vault/prod/polygon/quicknode/mainnet') AS read_output, SYSDATE() AS _inserted_timestamp
FROM
(
SELECT
ARRAY_AGG(rpc_request) batch_rpc_request
FROM
build_rpc_requests
WHERE
batch_no = {{ item }} + 1
AND batch_no IN (
SELECT
DISTINCT batch_no
FROM
build_rpc_requests))) {% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}),
reads_adjusted AS (
SELECT
VALUE :id :: STRING AS read_id,
VALUE :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
read_id_object [0] :: STRING AS pool_address,
LEFT(
read_id_object [1] :: STRING,
10
) AS function_sig,
read_id_object [2] :: STRING AS block_number,
_inserted_timestamp
FROM
pool_token_reads,
LATERAL FLATTEN(
input => read_output :data
)
),
pool_details AS (
SELECT
pool_address,
function_sig,
function_name,
read_result,
regexp_substr_all(SUBSTR(read_result, 3, len(read_result)), '.{64}') AS segmented_output,
_inserted_timestamp
FROM
reads_adjusted
LEFT JOIN function_sigs USING(function_sig)
),
FINAL AS (
SELECT
pool_address,
MIN(
CASE
WHEN function_name = 'symbol' THEN utils.udf_hex_to_string(
segmented_output [2] :: STRING
)
END
) AS pool_symbol,
MIN(
CASE
WHEN function_name = 'name' THEN utils.udf_hex_to_string(
segmented_output [2] :: STRING
)
END
) AS pool_name,
MIN(
CASE
WHEN read_result :: STRING = '0x' THEN NULL
ELSE utils.udf_hex_to_int(LEFT(read_result :: STRING, 66))
END
) :: INTEGER AS pool_decimals,
MAX(_inserted_timestamp) AS _inserted_timestamp
FROM
pool_details
GROUP BY
1
)
SELECT
p.block_number,
p.block_timestamp,
p.event_index,
p.tx_hash,
p.contract_address,
p.pool_id,
f.pool_address,
f.pool_symbol,
f.pool_name,
f.pool_decimals,
t.token0,
t.token1,
t.token2,
t.token3,
t.token4,
t.token5,
t.token6,
t.token7,
t.asset_managers,
p._log_id,
f._inserted_timestamp
FROM
FINAL f
LEFT JOIN pools_registered p
ON f.pool_address = p.pool_address
LEFT JOIN tokens_registered t
ON p.pool_id = t.pool_id
WHERE
t.token0 IS NOT NULL
qualify ROW_NUMBER() over (
PARTITION BY f.pool_address
ORDER BY
f._inserted_timestamp DESC
) = 1
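
-- pool_token_reads batches the eth_calls: every request is assigned
-- batch_no = CEIL(row_num / 250) and the Jinja loop over range(10) issues one
-- live.udf_api call per batch, so a single run covers up to 2,500 reads.
-- The batch assignment in isolation:
SELECT
pool_address,
row_num,
CEIL(row_num / 250) AS batch_no
FROM (
SELECT
pool_address,
ROW_NUMBER() over (ORDER BY pool_address) AS row_num
FROM (SELECT '0xba12222222228d8ba445958a75a0704d566bf2c8' AS pool_address)
);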


@ -1,12 +0,0 @@
version: 2
models:
- name: silver_dex__balancer_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS


@ -1,100 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_name AS (
SELECT
COALESCE(pool_name, pool_symbol) AS pool_name,
pool_address
FROM
{{ ref('silver_dex__balancer_pools') }}
),
swaps_base AS (
SELECT
tx_hash,
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'Swap' AS event_name,
event_index,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS amount_in_unadj,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS amount_out_unadj,
topics [1] :: STRING AS pool_id,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token_in,
CONCAT('0x', SUBSTR(topics [3] :: STRING, 27, 40)) AS token_out,
SUBSTR(
topics [1] :: STRING,
1,
42
) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp,
'balancer' AS platform,
origin_from_address AS sender,
origin_from_address AS tx_to
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] :: STRING = '0x2170c741c41531aec20e7c107c24eecfdd15e69c9bb0a8dd37b1840b9e0b207b'
AND contract_address = '0xba12222222228d8ba445958a75a0704d566bf2c8'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
tx_hash,
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
amount_in_unadj,
amount_out_unadj,
token_in,
token_out,
sender,
tx_to,
pool_id,
s.pool_address AS contract_address,
pool_name,
event_name,
platform,
_log_id,
_inserted_timestamp
FROM
swaps_base s
INNER JOIN pool_name pn
ON pn.pool_address = s.pool_address
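
-- Unlike most models in this set, the swaps here are decoded from the raw
-- log: DATA is split into 32-byte words with regexp_substr_all and each word
-- converted with the repo's utils.udf_hex_to_int. The same mechanics on a
-- literal payload (0xde0b6b3a7640000 = 10^18):
SELECT
TRY_TO_NUMBER(
utils.udf_hex_to_int(
regexp_substr_all(SUBSTR(d.data, 3, LEN(d.data)), '.{64}') [0] :: STRING
)
) AS amount_in_unadj -- 1000000000000000000
FROM (SELECT CONCAT('0x', LPAD('de0b6b3a7640000', 64, '0')) AS data) d;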


@ -1,66 +0,0 @@
version: 2
models:
- name: silver_dex__balancer_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _log_id
columns:
- name: AMOUNT_IN_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: AMOUNT_OUT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: EVENT_INDEX
tests:
- not_null
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: POOL_NAME
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- varchar
- name: SENDER
tests:
- not_null
- name: TX_TO
tests:
- not_null
- name: TX_HASH
tests:
- not_null


@ -1,440 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
full_refresh = false,
tags = ['silver_dex','defi','dex','curated']
) }}
WITH contract_deployments AS (
SELECT
tx_hash,
block_number,
block_timestamp,
from_address AS deployer_address,
to_address AS contract_address,
concat_ws(
'-',
block_number,
tx_position,
CONCAT(
TYPE,
'_',
trace_address
)
) AS _call_id,
modified_timestamp AS _inserted_timestamp,
ROW_NUMBER() over (
ORDER BY
contract_address
) AS row_num
FROM
{{ ref(
'core__fact_traces'
) }}
WHERE
-- curve contract deployers
from_address IN (
'0x7eeac6cddbd1d0b8af061742d41877d7f707289a',
'0xbabe61887f1de2713c6f97e567623453d3c79f67',
'0x722272d36ef0da72ff51c5a65db7b870e2e8d4ee',
'0xe5de15a9c9bbedb4f5ec13b131e61245f2983a69'
)
AND TYPE ILIKE 'create%'
AND trace_succeeded
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
qualify(ROW_NUMBER() over(PARTITION BY to_address
ORDER BY
block_timestamp ASC)) = 1
),
function_sigs AS (
SELECT
'0x87cb4f57' AS function_sig,
'base_coins' AS function_name
UNION ALL
SELECT
'0xb9947eb0' AS function_sig,
'underlying_coins' AS function_name
UNION ALL
SELECT
'0xc6610657' AS function_sig,
'coins' AS function_name
UNION ALL
SELECT
'0x06fdde03' AS function_sig,
'name' AS function_name
UNION ALL
SELECT
'0x95d89b41' AS function_sig,
'symbol' AS function_name
UNION ALL
SELECT
'0x313ce567' AS function_sig,
'decimals' AS function_name
),
function_inputs AS (
SELECT
SEQ4() AS function_input
FROM
TABLE(GENERATOR(rowcount => 8))
),
inputs_coins AS (
SELECT
deployer_address,
contract_address,
block_number,
function_sig,
(ROW_NUMBER() over (PARTITION BY contract_address
ORDER BY
block_number)) - 1 AS function_input
FROM
contract_deployments
JOIN function_sigs
ON 1 = 1
JOIN function_inputs
ON 1 = 1
WHERE
function_name = 'coins'
),
inputs_base_coins AS (
SELECT
deployer_address,
contract_address,
block_number,
function_sig,
(ROW_NUMBER() over (PARTITION BY contract_address
ORDER BY
block_number)) - 1 AS function_input
FROM
contract_deployments
JOIN function_sigs
ON 1 = 1
JOIN function_inputs
ON 1 = 1
WHERE
function_name = 'base_coins'
),
inputs_underlying_coins AS (
SELECT
deployer_address,
contract_address,
block_number,
function_sig,
(ROW_NUMBER() over (PARTITION BY contract_address
ORDER BY
block_number)) - 1 AS function_input
FROM
contract_deployments
JOIN function_sigs
ON 1 = 1
JOIN function_inputs
ON 1 = 1
WHERE
function_name = 'underlying_coins'
),
inputs_pool_details AS (
SELECT
deployer_address,
contract_address,
block_number,
function_sig,
NULL AS function_input
FROM
contract_deployments
JOIN function_sigs
ON 1 = 1
WHERE
function_name IN (
'name',
'symbol',
'decimals'
)
),
all_inputs AS (
SELECT
deployer_address,
contract_address,
block_number,
function_sig,
function_input
FROM
inputs_coins
UNION ALL
SELECT
deployer_address,
contract_address,
block_number,
function_sig,
function_input
FROM
inputs_base_coins
UNION ALL
SELECT
deployer_address,
contract_address,
block_number,
function_sig,
function_input
FROM
inputs_underlying_coins
UNION ALL
SELECT
deployer_address,
contract_address,
block_number,
function_sig,
function_input
FROM
inputs_pool_details
),
build_rpc_requests AS (
SELECT
deployer_address,
contract_address,
block_number,
function_sig,
function_input,
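-- calldata: 4-byte selector + 32-byte left-padded argument (NULL inputs become zero padding)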
CONCAT(
function_sig,
LPAD(IFNULL(function_input, 0), 64, '0')
) AS input,
utils.udf_json_rpc_call(
'eth_call',
[{'to': contract_address, 'from': null, 'data':input }, utils.udf_int_to_hex(block_number)],
concat_ws(
'-',
contract_address,
input,
block_number
)
) AS rpc_request,
row_num,
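-- group the eth_calls into batches of 50 per RPC request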
CEIL(
row_num / 50
) AS batch_no
FROM
all_inputs
LEFT JOIN contract_deployments USING(contract_address)
),
pool_token_reads AS (
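-- one batched RPC payload per batch_no: up to 6 batches on incremental runs, 60 on a full load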
{% if is_incremental() %}
{% for item in range(6) %}
(
SELECT
live.udf_api('POST', CONCAT('{Service}', '/', '{Authentication}'),{}, batch_rpc_request, 'Vault/prod/polygon/quicknode/mainnet') AS read_output, SYSDATE() AS _inserted_timestamp
FROM
(
SELECT
ARRAY_AGG(rpc_request) batch_rpc_request
FROM
build_rpc_requests
WHERE
batch_no = {{ item }} + 1
AND batch_no IN (
SELECT
DISTINCT batch_no
FROM
build_rpc_requests))) {% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}
{% else %}
{% for item in range(60) %}
(
SELECT
live.udf_api('POST', CONCAT('{Service}', '/', '{Authentication}'),{}, batch_rpc_request, 'Vault/prod/polygon/quicknode/mainnet') AS read_output, SYSDATE() AS _inserted_timestamp
FROM
(
SELECT
ARRAY_AGG(rpc_request) batch_rpc_request
FROM
build_rpc_requests
WHERE
batch_no = {{ item }} + 1
AND batch_no IN (
SELECT
DISTINCT batch_no
FROM
build_rpc_requests))) {% if not loop.last %}
UNION ALL
{% endif %}
{% endfor %}
{% endif %}),
reads_adjusted AS (
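-- read_id was built above as contract_address-calldata-block_number; split it back into its parts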
SELECT
VALUE :id :: STRING AS read_id,
VALUE :result :: STRING AS read_result,
SPLIT(
read_id,
'-'
) AS read_id_object,
read_id_object [0] :: STRING AS contract_address,
read_id_object [2] :: STRING AS block_number,
LEFT(
read_id_object [1] :: STRING,
10
) AS function_sig,
RIGHT(
read_id_object [1] :: STRING,
LENGTH(
read_id_object [1] :: STRING
) - 10
) :: INT AS function_input,
_inserted_timestamp
FROM
pool_token_reads,
LATERAL FLATTEN(
input => read_output :data
)
),
tokens AS (
SELECT
contract_address,
function_sig,
function_name,
function_input,
read_result,
regexp_substr_all(SUBSTR(read_result, 3, len(read_result)), '.{64}') [0] AS segmented_token_address,
_inserted_timestamp
FROM
reads_adjusted
LEFT JOIN function_sigs USING(function_sig)
WHERE
function_name IN (
'coins',
'base_coins',
'underlying_coins'
)
AND read_result IS NOT NULL
),
pool_details AS (
SELECT
contract_address,
function_sig,
function_name,
function_input,
read_result,
regexp_substr_all(SUBSTR(read_result, 3, len(read_result)), '.{64}') AS segmented_output,
_inserted_timestamp
FROM
reads_adjusted
LEFT JOIN function_sigs USING(function_sig)
WHERE
function_name IN (
'name',
'symbol',
'decimals'
)
AND read_result IS NOT NULL
),
all_pools AS (
SELECT
t.contract_address AS pool_address,
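-- token address = last 20 bytes (40 hex chars) of the 32-byte return word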
CONCAT('0x', SUBSTRING(t.segmented_token_address, 25, 40)) AS token_address,
function_input AS token_id,
function_name AS token_type,
MIN(
CASE
WHEN p.function_name = 'symbol' THEN utils.udf_hex_to_string(RTRIM(p.segmented_output [2] :: STRING, '0'))
END
) AS pool_symbol,
MIN(
CASE
WHEN p.function_name = 'name' THEN CONCAT(
utils.udf_hex_to_string(
p.segmented_output [2] :: STRING
),
utils.udf_hex_to_string(
segmented_output [3] :: STRING
)
)
END
) AS pool_name,
MIN(
CASE
WHEN p.function_name = 'decimals'
AND p.read_result :: STRING <> '0x' THEN utils.udf_hex_to_int(LEFT(p.read_result :: STRING, 66))
END
) :: INTEGER AS pool_decimals,
CONCAT(
t.contract_address,
'-',
CONCAT('0x', SUBSTRING(t.segmented_token_address, 25, 40)),
'-',
function_input,
'-',
function_name
) AS pool_id,
MAX(
t._inserted_timestamp
) AS _inserted_timestamp
FROM
tokens t
LEFT JOIN pool_details p USING(contract_address)
WHERE
token_address IS NOT NULL
AND token_address <> '0x0000000000000000000000000000000000000000'
GROUP BY
1,
2,
3,
4
),
FINAL AS (
SELECT
block_number,
block_timestamp,
tx_hash,
deployer_address,
pool_address,
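-- Curve's 0xeeee... placeholder denotes native MATIC; remap it to the WMATIC address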
CASE
WHEN token_address = '0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee' THEN '0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270'
ELSE token_address
END AS token_address,
token_id,
token_type,
CASE
WHEN token_address = '0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee' THEN 'WMATIC'
WHEN pool_symbol IS NULL THEN C.token_symbol
ELSE pool_symbol
END AS pool_symbol,
pool_name,
CASE
WHEN token_address = '0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee' THEN '18'
WHEN pool_decimals IS NULL THEN C.token_decimals
ELSE pool_decimals
END AS pool_decimals,
pool_id,
_call_id,
A._inserted_timestamp
FROM
all_pools A
LEFT JOIN {{ ref('silver__contracts') }} C
ON A.token_address = C.contract_address
LEFT JOIN contract_deployments d
ON A.pool_address = d.contract_address qualify(ROW_NUMBER() over(PARTITION BY pool_address, token_address
ORDER BY
A._inserted_timestamp DESC)) = 1
)
SELECT
*,
ROW_NUMBER() over (
PARTITION BY pool_address
ORDER BY
token_address ASC
) AS token_num
FROM
FINAL

View File

@ -1,29 +0,0 @@
version: 2
models:
- name: silver_dex__curve_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ID
columns:
- name: POOL_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- varchar
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- varchar
- name: POOL_SYMBOL
- name: POOL_NAME
- name: POOL_DECIMALS
- name: _INSERTED_TIMESTAMP

View File

@ -1,225 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_meta AS (
SELECT
pool_address,
pool_name,
token_address,
pool_symbol AS symbol,
token_id::INTEGER AS token_id,
token_type::STRING AS token_type
FROM
{{ ref('silver_dex__curve_pools') }}
),
pools AS (
SELECT
DISTINCT pool_address,
pool_name
FROM pool_meta
QUALIFY (ROW_NUMBER() OVER (PARTITION BY pool_address ORDER BY pool_name ASC NULLS LAST)) = 1
),
curve_base AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_index,
CASE
WHEN topics [0] :: STRING = '0xd013ca23e77a65003c2c659c5442c00c805371b7fc1ebd4c206c41d1536bd90b' THEN 'TokenExchangeUnderlying'
ELSE 'TokenExchange'
END AS event_name,
contract_address AS pool_address,
pool_name,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS sender,
TRY_TO_NUMBER(utils.udf_hex_to_int(
segmented_data [0] :: STRING
)) AS sold_id,
TRY_TO_NUMBER(utils.udf_hex_to_int(
segmented_data [1] :: STRING
)) AS tokens_sold,
TRY_TO_NUMBER(utils.udf_hex_to_int(
segmented_data [2] :: STRING
)) AS bought_id,
TRY_TO_NUMBER(utils.udf_hex_to_int(
segmented_data [3] :: STRING
)) AS tokens_bought,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }} l
INNER JOIN pools p
ON p.pool_address = l.contract_address
WHERE
topics [0] :: STRING IN (
'0x8b3e96f2b889fa771c53c981b40daf005f63f637f1869f707052d15a3dd97140',
'0xb2e76ae99761dc136e598d4a629bb347eccb9532a5f8bbd72e18467c3c34cc98',
'0xd013ca23e77a65003c2c659c5442c00c805371b7fc1ebd4c206c41d1536bd90b'
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
token_exchange AS (
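-- resolve Curve's numeric coin ids (sold_id / bought_id) to token addresses via pool metadata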
SELECT
_log_id,
MAX(CASE WHEN sold_id = token_id THEN token_address END) AS token_in,
MAX(CASE WHEN bought_id = token_id THEN token_address END) AS token_out,
MAX(CASE WHEN sold_id = token_id THEN symbol END) AS symbol_in,
MAX(CASE WHEN bought_id = token_id THEN symbol END) AS symbol_out
FROM curve_base t
LEFT JOIN pool_meta p ON p.pool_address = t.pool_address AND (p.token_id = t.sold_id OR p.token_id = t.bought_id)
WHERE token_type = 'coins'
GROUP BY 1
),
token_transfers AS (
SELECT
tx_hash,
contract_address AS token_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
DATA :: STRING
)
) AS amount,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS from_address,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS to_address,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] :: STRING = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef'
AND tx_hash IN (
SELECT
DISTINCT tx_hash
FROM
curve_base
)
AND CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) <> '0x0000000000000000000000000000000000000000'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
from_transfers AS (
SELECT
DISTINCT tx_hash,
token_address,
from_address,
amount
FROM
token_transfers
),
to_transfers AS (
SELECT
DISTINCT tx_hash,
token_address,
to_address,
amount
FROM
token_transfers
),
ready_pool_info AS (
SELECT
s.block_number,
s.block_timestamp,
s.tx_hash,
s.origin_function_signature,
s.origin_from_address,
s.origin_from_address AS tx_to,
s.origin_to_address,
event_index,
event_name,
pool_address,
pool_address AS contract_address,
pool_name,
sender,
sold_id,
tokens_sold,
COALESCE(sold.token_address,e.token_in) AS token_in,
e.symbol_in AS symbol_in,
bought_id,
tokens_bought,
COALESCE(bought.token_address,e.token_out) AS token_out,
e.symbol_out AS symbol_out,
s._log_id,
_inserted_timestamp
FROM
curve_base s
LEFT JOIN token_exchange e ON s._log_id = e._log_id
LEFT JOIN from_transfers sold
ON tokens_sold = sold.amount
AND s.tx_hash = sold.tx_hash
LEFT JOIN to_transfers bought
ON tokens_bought = bought.amount
AND s.tx_hash = bought.tx_hash
WHERE
tokens_sold <> 0
qualify(ROW_NUMBER() over(PARTITION BY s._log_id
ORDER BY
_inserted_timestamp DESC)) = 1
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
tx_to,
origin_to_address,
event_index,
event_name,
pool_address,
contract_address,
pool_name,
sender,
sold_id,
tokens_sold,
token_in,
symbol_in,
bought_id,
tokens_bought,
token_out,
symbol_out,
_log_id,
_inserted_timestamp,
'curve' AS platform
FROM
ready_pool_info

View File

@ -1,54 +0,0 @@
version: 2
models:
- name: silver_dex__curve_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _log_id
columns:
- name: TOKENS_SOLD
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: TOKENS_BOUGHT
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- name: EVENT_INDEX
tests:
- not_null
- name: POOL_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: POOL_NAME
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- varchar
- name: SENDER
tests:
- not_null
- name: TX_TO
tests:
- not_null
- name: TOKEN_IN
- name: TOKEN_OUT
- name: TX_HASH
tests:
- not_null

View File

@ -1,57 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_events AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS newBorn,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS baseToken,
CONCAT('0x', SUBSTR(segmented_data [2] :: STRING, 25, 40)) AS quoteToken,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0x357c5e9cfa8b834edcef7c7aabd8f9db09119d11' --DODOZoo
AND topics [0] :: STRING = '0x5c428a2e12ecaa744a080b25b4cda8b86359c82d726575d7d747e07708071f93' --DODOBirth
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
newBorn AS pool_address,
baseToken AS base_token,
quoteToken AS quote_token,
_log_id AS _id,
_inserted_timestamp
FROM
pool_events qualify(ROW_NUMBER() OVER (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,17 +0,0 @@
version: 2
models:
- name: silver_dex__dodo_v1_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ

View File

@ -1,184 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
base_token,
quote_token
FROM {{ ref('silver_dex__dodo_v1_pools') }}
),
proxies AS (
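-- known DODO proxy addresses; swaps whose seller/buyer is a proxy are filtered out below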
SELECT
'0xdbfaf391c37339c903503495395ad7d6b096e192' AS proxy_address
UNION
SELECT
'0x6c30be15d88462b788dea7c6a860a2ccaf7b2670' AS proxy_address
),
sell_base_token AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS l_segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS seller_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [0] :: STRING
)
) AS payBase,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [1] :: STRING
)
) AS receiveQuote,
base_token,
quote_token,
base_token AS tokenIn,
quote_token AS tokenOut,
payBase AS amountIn,
receiveQuote AS amountOut,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON p.pool_address = l.contract_address
WHERE
topics [0] :: STRING = '0xd8648b6ac54162763c86fd54bf2005af8ecd2f9cb273a5775921fd7f91e17b2d' --sellBaseToken
AND seller_address NOT IN (
SELECT
proxy_address
FROM
proxies
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
buy_base_token AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS l_segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS buyer_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [0] :: STRING
)
) AS receiveBase,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [1] :: STRING
)
) AS payQuote,
base_token,
quote_token,
quote_token AS tokenIn,
base_token AS tokenOut,
payQuote AS amountIn,
receiveBase AS amountOut,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON p.pool_address = l.contract_address
WHERE
topics [0] :: STRING = '0xe93ad76094f247c0dafc1c61adc2187de1ac2738f7a3b49cb20b2263420251a3' --buyBaseToken
AND buyer_address NOT IN (
SELECT
proxy_address
FROM
proxies
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
seller_address AS sender,
origin_from_address AS tx_to,
tokenIn AS token_in,
tokenOut AS token_out,
amountIn AS amount_in_unadj,
amountOut AS amount_out_unadj,
'SellBaseToken' AS event_name,
'dodo-v1' AS platform,
_log_id,
_inserted_timestamp
FROM
sell_base_token
UNION ALL
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
buyer_address AS sender,
origin_from_address AS tx_to,
tokenIn AS token_in,
tokenOut AS token_out,
amountIn AS amount_in_unadj,
amountOut AS amount_out_unadj,
'BuyBaseToken' AS event_name,
'dodo-v1' AS platform,
_log_id,
_inserted_timestamp
FROM
buy_base_token

View File

@ -1,56 +0,0 @@
version: 2
models:
- name: silver_dex__dodo_v1_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: AMOUNT_IN_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: AMOUNT_OUT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: EVENT_INDEX
tests:
- not_null
- name: SENDER
tests:
- not_null
- name: TX_TO
tests:
- not_null
- name: TX_HASH
tests:
- not_null

View File

@ -1,73 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS baseToken,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS quoteToken,
CONCAT('0x', SUBSTR(segmented_data [2] :: STRING, 25, 40)) AS creator,
CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address IN (
'0x95e887adf9eaa22cc1c6e3cb7f07adc95b4b25a8',
--dpp - factory,
'0xd24153244066f0afa9415563bfc7ba248bfb7a51',
--dpp advanced - private pool factory
'0x43c49f8dd240e1545f147211ec9f917376ac1e87',
--dsp - factory
'0x79887f65f83bdf15bcc8736b5e5bcdb48fb8fe13' --dvm - factory
)
AND topics [0] :: STRING IN (
'0x8494fe594cd5087021d4b11758a2bbc7be28a430e94f2b268d668e5991ed3b8a',
--NewDPP
'0xbc1083a2c1c5ef31e13fb436953d22b47880cf7db279c2c5666b16083afd6b9d',
--NewDSP
'0xaf5c5f12a80fc937520df6fcaed66262a4cc775e0f3fceaf7a7cfe476d9a751d' --NewDVM
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
baseToken AS base_token,
quoteToken AS quote_token,
creator,
pool_address,
_log_id,
_inserted_timestamp
FROM
pools qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,17 +0,0 @@
version: 2
models:
- name: silver_dex__dodo_v2_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ

View File

@ -1,128 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
DISTINCT pool_address
FROM
{{ ref('silver_dex__dodo_v2_pools') }}
),
proxies AS (
SELECT
'0xdbfaf391c37339c903503495395ad7d6b096e192' AS proxy_address
UNION
SELECT
'0x6c30be15d88462b788dea7c6a860a2ccaf7b2670' AS proxy_address
),
swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS l_segmented_data,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [0] :: STRING,
25,
40
)
) AS fromToken,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [1] :: STRING,
25,
40
)
) AS toToken,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [2] :: STRING
)
) AS fromAmount,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [3] :: STRING
)
) AS toAmount,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [4] :: STRING,
25,
40
)
) AS trader_address,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [5] :: STRING,
25,
40
)
) AS receiver_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON
l.contract_address = p.pool_address
WHERE
l.topics [0] :: STRING = '0xc2c0245e056d5fb095f04cd6373bc770802ebd1e6c918eb78fdef843cdb37b0f' --dodoswap
AND trader_address NOT IN (
SELECT
proxy_address
FROM
proxies
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
fromToken AS token_in,
toToken AS token_out,
fromAmount AS amount_in_unadj,
toAmount AS amount_out_unadj,
trader_address AS sender,
receiver_address AS tx_to,
'DodoSwap' AS event_name,
'dodo-v2' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base

View File

@ -1,56 +0,0 @@
version: 2
models:
- name: silver_dex__dodo_v2_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: AMOUNT_IN_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: AMOUNT_OUT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- decimal
- float
- number
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: EVENT_INDEX
tests:
- not_null
- name: SENDER
tests:
- not_null
- name: TX_TO
tests:
- not_null
- name: TX_HASH
tests:
- not_null

View File

@ -1,65 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address AS factory_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
utils.udf_hex_to_int(
segmented_data [1] :: STRING
) :: INT AS pool_id,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address IN (
'0xc2544a32872a91f4a553b404c6950e89de901fdb',
--v1 factory
'0x54f454d747e037da288db568d4121117eab34e79' --v2 factory
)
AND topics [0] :: STRING = '0x0d3648bd0f6ba80134a33ba9275ac585d9d315f0ad8355cddefde31afa28d0e9' --pairCreated
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
factory_address,
event_index,
token0,
token1,
pool_address,
pool_id,
_log_id,
_inserted_timestamp
FROM
pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__fraxswap_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ

View File

@ -1,123 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__fraxswap_pools') }}
),
swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS l_segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS sender_address,
CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS to_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [0] :: STRING
)
) AS amount0In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [1] :: STRING
)
) AS amount1In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [2] :: STRING
)
) AS amount0Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [3] :: STRING
)
) AS amount1Out,
token0,
token1,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools
ON l.contract_address = pool_address
WHERE
l.topics [0] :: STRING = '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822' --Swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
sender_address AS sender,
to_address AS tx_to,
amount0In,
amount1In,
amount0Out,
amount1Out,
token0,
token1,
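-- when both in-amounts are nonzero, the out-side token identifies the true input (amount0Out > 0 implies token1 came in)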
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN amount1In
WHEN amount0In <> 0 THEN amount0In
WHEN amount1In <> 0 THEN amount1In
END AS amount_in_unadj,
CASE
WHEN amount0Out <> 0 THEN amount0Out
WHEN amount1Out <> 0 THEN amount1Out
END AS amount_out_unadj,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN token1
WHEN amount0In <> 0 THEN token0
WHEN amount1In <> 0 THEN token1
END AS token_in,
CASE
WHEN amount0Out <> 0 THEN token0
WHEN amount1Out <> 0 THEN token1
END AS token_out,
'Swap' AS event_name,
'fraxswap' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE token_in <> token_out

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__fraxswap_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,62 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH contract_deployments AS (
SELECT
tx_hash,
block_number,
block_timestamp,
from_address AS deployer_address,
to_address AS contract_address,
concat_ws(
'-',
block_number,
tx_position,
CONCAT(
TYPE,
'_',
trace_address
)
) AS _call_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_traces') }}
WHERE
from_address IN (
'0x63ae536fec0b57bdeb1fd6a893191b4239f61bff',
'0x336bfba2c4d7bda5e1f83069d0a95509ecd5d2b5',
'0x9817a71ca8e309d654ee7e1999577bce6e6fd9ac'
)
AND TYPE ILIKE 'create%'
AND tx_succeeded
AND trace_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
qualify(ROW_NUMBER() over(PARTITION BY to_address
ORDER BY
block_timestamp ASC)) = 1
)
SELECT
tx_hash,
block_number,
block_timestamp,
deployer_address,
contract_address AS pool_address,
_call_id,
_inserted_timestamp
FROM
contract_deployments

View File

@ -1,11 +0,0 @@
version: 2
models:
- name: silver_dex__hashflow_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null

View File

@ -1,227 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address
FROM
{{ ref('silver_dex__hashflow_pools') }}
),
router_swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS l_segmented_data,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [1] :: STRING,
25,
40
)
) AS account_address,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [3] :: STRING,
25,
40
)
) AS tokenIn,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [4] :: STRING,
25,
40
)
) AS tokenOut,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [5] :: STRING
)
) AS amountIn,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [6] :: STRING
)
) AS amountOut,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON l.contract_address = p.pool_address
WHERE
l.topics [0] :: STRING = '0xb709ddcc6550418e9b89df1f4938071eeaa3f6376309904c77e15d46b16066f5' --swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS l_segmented_data,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [0] :: STRING,
25,
40
)
) AS account_address,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [2] :: STRING,
25,
40
)
) AS tokenIn,
CONCAT(
'0x',
SUBSTR(
l_segmented_data [3] :: STRING,
25,
40
)
) AS tokenOut,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [4] :: STRING
)
) AS amountIn,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [5] :: STRING
)
) AS amountOut,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON l.contract_address = p.pool_address
WHERE
l.topics [0] :: STRING = '0x8cf3dec1929508e5677d7db003124e74802bfba7250a572205a9986d86ca9f1e' --swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
FINAL AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
origin_from_address AS sender,
account_address AS tx_to,
tokenIn AS token_in,
tokenOut AS token_out,
amountIn AS amount_in_unadj,
amountOut AS amount_out_unadj,
'Swap' AS event_name,
'hashflow' AS platform,
_log_id,
_inserted_timestamp
FROM
router_swaps_base
UNION ALL
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
origin_from_address AS sender,
account_address AS tx_to,
tokenIn AS token_in,
tokenOut AS token_out,
amountIn AS amount_in_unadj,
amountOut AS amount_out_unadj,
'Swap' AS event_name,
'hashflow' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
sender,
tx_to,
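-- the zero address denotes native MATIC; remap it to WMATIC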
CASE
WHEN token_in = '0x0000000000000000000000000000000000000000' THEN '0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270'
ELSE token_in
END AS token_in,
CASE
WHEN token_out = '0x0000000000000000000000000000000000000000' THEN '0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270'
ELSE token_out
END AS token_out,
amount_in_unadj,
amount_out_unadj,
event_name,
platform,
_log_id,
_inserted_timestamp
FROM
FINAL

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__hashflow_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,51 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "pool_address",
tags = ['silver_dex','defi','dex','curated']
) }}
WITH contract_deployments AS (
SELECT
tx_hash,
block_number,
block_timestamp,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
origin_from_address AS deployer_address,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0xde828fdc3f497f16416d1bb645261c7c6a62dab5'
AND topics [0] :: STRING = '0xdbd2a1ea6808362e6adbec4db4969cbc11e3b0b28fb6c74cb342defaaf1daada'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
tx_hash,
block_number,
block_timestamp,
deployer_address,
pool_address,
_log_id,
_inserted_timestamp
FROM
contract_deployments qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,11 +0,0 @@
version: 2
models:
- name: silver_dex__hashflow_v3_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null

View File

@ -1,97 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address
FROM
{{ ref('silver_dex__hashflow_v3_pools') }}
),
swaps AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
l.origin_function_signature,
l.origin_from_address,
l.origin_to_address,
l.event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS trader_address,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS effective_trader_address,
CONCAT(
'0x',
segmented_data [2] :: STRING
) AS txid,
CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS tokenIn,
CONCAT('0x', SUBSTR(segmented_data [4] :: STRING, 25, 40)) AS tokenOut,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [5] :: STRING
)
) AS amountIn,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [6] :: STRING
)
) AS amountOut,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON l.contract_address = p.pool_address
WHERE
l.topics [0] :: STRING = '0x34f57786fb01682fb4eec88d340387ef01a168fe345ea5b76f709d4e560c10eb' --Trade
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
contract_address,
effective_trader_address AS sender,
trader_address AS tx_to,
txid,
CASE
WHEN tokenIn = '0x0000000000000000000000000000000000000000' THEN '0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270'
ELSE tokenIn
END AS token_in,
CASE
WHEN tokenOut = '0x0000000000000000000000000000000000000000' THEN '0x0d500b1d8e8ef31e21c99d1db9a6444d3adf1270'
ELSE tokenOut
END AS token_out,
amountIn AS amount_in_unadj,
amountOut AS amount_out_unadj,
'Trade' AS event_name,
'hashflow-v3' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__hashflow_v3_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- TIMESTAMP_LTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,69 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS ampBps,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS totalPool,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = LOWER('0x5F1fe642060B5B9658C15721Ea22E982643c095c') --dynamic fee factory
AND topics [0] :: STRING = '0xfc574402c445e75f2b79b67884ff9c662244dce454c5ae68935fcd0bebb7c8ff' --created pool
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
pool_address,
ampBps AS amp_bps,
totalPool AS total_pool,
_log_id,
_inserted_timestamp
FROM
pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v1_dynamic_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ

View File

@ -1,130 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__kyberswap_v1_dynamic_pools') }}
),
swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS l_segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS sender_address,
CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS to_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [0] :: STRING
)
) AS amount0In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [1] :: STRING
)
) AS amount1In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [2] :: STRING
)
) AS amount0Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [3] :: STRING
)
) AS amount1Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [4] :: STRING
)
) AS feeInPrecision,
token0,
token1,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON p.pool_address = l.contract_address
WHERE
l.topics [0] :: STRING = '0x606ecd02b3e3b4778f8e97b2e03351de14224efaa5fa64e62200afc9395c2499' --Dynamic Swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
sender_address AS sender,
to_address AS tx_to,
amount0In,
amount1In,
amount0Out,
amount1Out,
feeInPrecision AS fee_in_precision,
token0,
token1,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN amount1In
WHEN amount0In <> 0 THEN amount0In
WHEN amount1In <> 0 THEN amount1In
END AS amount_in_unadj,
CASE
WHEN amount0Out <> 0 THEN amount0Out
WHEN amount1Out <> 0 THEN amount1Out
END AS amount_out_unadj,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN token1
WHEN amount0In <> 0 THEN token0
WHEN amount1In <> 0 THEN token1
END AS token_in,
CASE
WHEN amount0Out <> 0 THEN token0
WHEN amount1Out <> 0 THEN token1
END AS token_out,
'Dynamic Swap' AS event_name,
'kyberswap-v1' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
token_in <> token_out

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v1_dynamic_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,76 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
)
) AS ampBps,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
)
) AS feeUnits,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
)
) AS totalPool,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = LOWER('0x1c758aF0688502e49140230F6b0EBd376d429be5') --static pool factory
AND topics [0] :: STRING = '0xb6bce363b712c921bead4bcc977289440eb6172eb89e258e3a25bd49ca806de6' --create pool
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
pool_address,
ampBps AS amp_bps,
feeUnits AS fee_units,
totalPool AS total_pool,
_log_id,
_inserted_timestamp
FROM
pool_creation
qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v1_static_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ

View File

@ -1,130 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__kyberswap_v1_static_pools') }}
),
swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS l_segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS sender_address,
CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS to_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [0] :: STRING
)
) AS amount0In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [1] :: STRING
)
) AS amount1In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [2] :: STRING
)
) AS amount0Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [3] :: STRING
)
) AS amount1Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [4] :: STRING
)
) AS feeInPrecision,
token0,
token1,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON p.pool_address = l.contract_address
WHERE
l.topics [0] :: STRING = '0x606ecd02b3e3b4778f8e97b2e03351de14224efaa5fa64e62200afc9395c2499' -- static swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
sender_address AS sender,
to_address AS tx_to,
amount0In,
amount1In,
amount0Out,
amount1Out,
feeInPrecision AS fee_in_precision,
token0,
token1,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN amount1In
WHEN amount0In <> 0 THEN amount0In
WHEN amount1In <> 0 THEN amount1In
END AS amount_in_unadj,
CASE
WHEN amount0Out <> 0 THEN amount0Out
WHEN amount1Out <> 0 THEN amount1Out
END AS amount_out_unadj,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN token1
WHEN amount0In <> 0 THEN token0
WHEN amount1In <> 0 THEN token1
END AS token_in,
CASE
WHEN amount0Out <> 0 THEN token0
WHEN amount1Out <> 0 THEN token1
END AS token_out,
'Static Swap' AS event_name,
'kyberswap-v1' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
token_in <> token_out

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v1_static_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,66 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
TRY_TO_NUMBER(utils.udf_hex_to_int(topics [3] :: STRING)) AS swapFeeUnits,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
)
) AS tickDistance,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0x5f1dddbf348ac2fbe22a163e30f99f9ece3dd50a' --Elastic Pool Deployer
AND topics [0] :: STRING = '0x783cca1c0412dd0d695e784568c96da2e9c22ff989357a2e8b1d9b2b4e6b7118' --Create pool
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
swapFeeUnits AS swap_fee_units,
tickDistance AS tick_distance,
pool_address,
_log_id,
_inserted_timestamp
FROM
pool_creation
qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1
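
On the SUBSTR offsets above: a log topic is a 0x-prefixed 32-byte word (66 characters), and an indexed address occupies the low 20 bytes, so the address hex starts at position 27 and runs 40 characters. A runnable sketch on an illustrative topic value:

SELECT
    CONCAT(
        '0x',
        SUBSTR(
            '0x0000000000000000000000007ceb23fd6bc0add59e62ac25578270cff1b9f619',
            27,
            40
        )
    ) AS token_address
-- returns 0x7ceb23fd6bc0add59e62ac25578270cff1b9f619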

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v2_elastic_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ

View File

@ -1,125 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__kyberswap_v2_elastic_pools') }}
),
swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
l.event_index,
l.contract_address,
regexp_substr_all(SUBSTR(l.data, 3, len(l.data)), '.{64}') AS l_segmented_data,
CONCAT('0x', SUBSTR(l.topics [1] :: STRING, 27, 40)) AS sender_address,
CONCAT('0x', SUBSTR(l.topics [2] :: STRING, 27, 40)) AS recipient_address,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
l_segmented_data [0] :: STRING
)
) AS deltaQty0,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
l_segmented_data [1] :: STRING
)
) AS deltaQty1,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [2] :: STRING
)
) AS sqrtP,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
l_segmented_data [3] :: STRING
)
) AS liquidity,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
's2c',
l_segmented_data [4] :: STRING
)
) AS currentTick,
ABS(GREATEST(deltaQty0, deltaQty1)) AS amountOut, -- absolute value of the larger (positive) delta
ABS(LEAST(deltaQty0, deltaQty1)) AS amountIn, -- absolute value of the smaller (negative) delta
token0,
token1,
CASE
WHEN deltaQty0 < 0 THEN token0
ELSE token1
END AS token_in,
CASE
WHEN deltaQty0 > 0 THEN token0
ELSE token1
END AS token_out,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN pools p
ON p.pool_address = l.contract_address
WHERE
topics [0] :: STRING = '0xc42079f94a6350d7e6235f29174924f928cc2ac818eb64fed8004e115fbcca67' -- elastic swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
origin_function_signature,
origin_from_address,
origin_to_address,
event_index,
sender_address AS sender,
recipient_address AS tx_to,
deltaQty0 AS delta_qty0,
deltaQty1 AS delta_qty1,
sqrtP AS sqrt_p,
liquidity,
currentTick AS current_tick,
amountIn AS amount_in_unadj,
amountOut AS amount_out_unadj,
token0,
token1,
token_in,
token_out,
'Elastic Swap' AS event_name,
'kyberswap-v2' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
token_in <> token_out
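
The GREATEST/LEAST pairing above relies on the two deltas carrying opposite signs, so one call isolates each leg and ABS normalizes it. On hypothetical deltas:

SELECT
    ABS(GREATEST(deltaQty0, deltaQty1)) AS amountOut,
    ABS(LEAST(deltaQty0, deltaQty1)) AS amountIn
FROM
    (
        VALUES
            (-1000, 997),
            (50, -49)
    ) AS t (deltaQty0, deltaQty1)
-- row 1: amountOut = 997, amountIn = 1000
-- row 2: amountOut = 50, amountIn = 49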

View File

@ -1,81 +0,0 @@
version: 2
models:
- name: silver_dex__kyberswap_v2_elastic_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null:
where: BLOCK_TIMESTAMP > '2021-08-01'
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,62 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS token1,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
utils.udf_hex_to_int(
segmented_data [1] :: STRING
) :: INT AS pool_id,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = LOWER('0x5757371414417b8C6CAad45bAeF941aBc7d3Ab32') --QuickSwap V2 Factory
AND topics [0] :: STRING = '0x0d3648bd0f6ba80134a33ba9275ac585d9d315f0ad8355cddefde31afa28d0e9' --PairCreated
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
pool_address,
pool_id,
_log_id,
_inserted_timestamp
FROM
pool_creation
qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1
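
The QUALIFY clause above keeps only the most recently inserted row per pool when a creation event is re-processed. A self-contained sketch on hypothetical rows:

SELECT
    pool_address,
    _inserted_timestamp
FROM
    (
        VALUES
            ('0xpool_a', '2024-01-01 00:00:00' :: TIMESTAMP),
            ('0xpool_a', '2024-01-02 00:00:00' :: TIMESTAMP),
            ('0xpool_b', '2024-01-01 12:00:00' :: TIMESTAMP)
    ) AS t (pool_address, _inserted_timestamp)
qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
    _inserted_timestamp DESC)) = 1
-- keeps the 2024-01-02 row for 0xpool_a and the single 0xpool_b row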

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__quickswap_v2_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ

View File

@ -1,123 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pools AS (
SELECT
pool_address,
token0,
token1
FROM
{{ ref('silver_dex__quickswap_v2_pools') }}
),
swaps_base AS (
SELECT
block_number,
origin_function_signature,
origin_from_address,
origin_to_address,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [0] :: STRING
) :: INTEGER
) AS amount0In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [1] :: STRING
) :: INTEGER
) AS amount1In,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [2] :: STRING
) :: INTEGER
) AS amount0Out,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [3] :: STRING
) :: INTEGER
) AS amount1Out,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS sender,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS tx_to,
token0,
token1,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
INNER JOIN pools p
ON p.pool_address = contract_address
WHERE
topics [0] :: STRING = '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822' --Swap
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
contract_address,
sender,
tx_to,
amount0In,
amount1In,
amount0Out,
amount1Out,
token0,
token1,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN amount1In
WHEN amount0In <> 0 THEN amount0In
WHEN amount1In <> 0 THEN amount1In
END AS amount_in_unadj,
CASE
WHEN amount0Out <> 0 THEN amount0Out
WHEN amount1Out <> 0 THEN amount1Out
END AS amount_out_unadj,
CASE
WHEN amount0In <> 0
AND amount1In <> 0
AND amount0Out <> 0 THEN token1
WHEN amount0In <> 0 THEN token0
WHEN amount1In <> 0 THEN token1
END AS token_in,
CASE
WHEN amount0Out <> 0 THEN token0
WHEN amount1Out <> 0 THEN token1
END AS token_out,
'Swap' AS event_name,
'quickswap-v2' AS platform,
_log_id,
_inserted_timestamp
FROM
swaps_base
WHERE
token_in <> token_out
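
On the payload split used above: non-indexed event params are packed as consecutive 32-byte words in DATA, so SUBSTR(DATA, 3, ...) drops the 0x prefix and regexp_substr_all chops the rest into 64-character words. A runnable sketch on a hypothetical two-word payload (0x64 = 100):

SELECT
    regexp_substr_all(SUBSTR(payload, 3, len(payload)), '.{64}') AS segmented_data
FROM
    (
        SELECT
            '0x' || LPAD('64', 64, '0') || LPAD('0', 64, '0') AS payload
    )
-- segmented_data [0] is 62 zeros then '64' (amount0In = 100);
-- segmented_data [1] is all zeros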

View File

@ -1,116 +0,0 @@
version: 2
models:
- name: silver_dex__quickswap_v2_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 2
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: CONTRACT_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_IN_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT_UNADJ
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: TOKEN_IN
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_OUT
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SYMBOL_IN
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: SYMBOL_OUT
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: TX_TO
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: PLATFORM
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- STRING
- VARCHAR
- name: EVENT_INDEX
tests:
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: _LOG_ID
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+

View File

@ -1,60 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
cluster_by = ['_inserted_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
LOWER(CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40))) AS token0_address,
LOWER(CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40))) AS token1_address,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] :: STRING = '0x91ccaa7a278130b65168c3a0c8d3bcae84cf5e43704342bd3ec0b59e59c036db' --Pool created
AND contract_address = LOWER('0x411b0fAcC3489691f28ad58c47006AF5E3Ab3A28') --QuickSwap V3 factory
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0_address,
token1_address,
pool_address,
_log_id,
_inserted_timestamp
FROM
pool_creation
qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1
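
Note the different offset for data words versus topics: once the 0x prefix is stripped, a data word is bare 64-character hex, so an embedded address is the last 40 characters starting at position 25. On an illustrative word:

SELECT
    CONCAT(
        '0x',
        SUBSTR(
            '000000000000000000000000411b0facc3489691f28ad58c47006af5e3ab3a28',
            25,
            40
        )
    ) AS pool_address
-- returns 0x411b0facc3489691f28ad58c47006af5e3ab3a28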

View File

@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__quickswap_v3_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: POOL_ADDRESS
tests:
- not_null
- name: TX_HASH
tests:
- not_null

View File

@ -1,99 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH base_swaps AS (
SELECT
*,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40)) AS sender,
CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40)) AS recipient,
utils.udf_hex_to_int(
's2c',
segmented_data [0] :: STRING
) :: FLOAT AS amount0_unadj,
utils.udf_hex_to_int(
's2c',
segmented_data [1] :: STRING
) :: FLOAT AS amount1_unadj,
utils.udf_hex_to_int(
's2c',
segmented_data [2] :: STRING
) :: FLOAT AS price,
utils.udf_hex_to_int(
's2c',
segmented_data [3] :: STRING
) :: FLOAT AS liquidity,
utils.udf_hex_to_int(
's2c',
segmented_data [4] :: STRING
) :: FLOAT AS tick,
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
block_timestamp :: DATE > '2022-08-01'
AND topics [0] :: STRING = '0xc42079f94a6350d7e6235f29174924f928cc2ac818eb64fed8004e115fbcca67'
AND tx_succeeded
AND NOT event_removed
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
pool_data AS (
SELECT
token0_address,
token1_address,
pool_address
FROM
{{ ref('silver_dex__quickswap_v3_pools') }}
),
FINAL AS (
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address AS pool_address,
recipient,
sender,
tick,
price,
liquidity,
event_index,
token0_address,
token1_address,
origin_function_signature,
origin_from_address,
origin_to_address,
amount0_unadj,
amount1_unadj,
_log_id,
_inserted_timestamp
FROM
base_swaps
INNER JOIN pool_data
ON pool_data.pool_address = base_swaps.contract_address
)
SELECT
*
FROM
FINAL qualify(ROW_NUMBER() over(PARTITION BY _log_id
ORDER BY
_inserted_timestamp DESC)) = 1
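
On the 's2c' mode used above: it evidently asks udf_hex_to_int to read the word as a signed two's-complement int256, so negative swap deltas survive the decode instead of wrapping to huge positives. A one-byte analogue in plain SQL (the UDF itself is project-specific and handles 32 bytes):

SELECT
    raw,
    IFF(raw >= 128, raw - 256, raw) AS signed_value
FROM
    (
        VALUES
            (255), -- 0xff -> -1
            (1) -- 0x01 -> 1
    ) AS t (raw)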

View File

@ -1,56 +0,0 @@
version: 2
models:
- name: silver_dex__quickswap_v3_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: AMOUNT0_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: AMOUNT1_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- name: LIQUIDITY
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: EVENT_INDEX
tests:
- not_null
- name: POOL_ADDRESS
tests:
- not_null
- name: RECIPIENT
tests:
- not_null
- name: SENDER
tests:
- not_null
- name: TICK
tests:
- not_null
- name: TX_HASH
tests:
- not_null

Some files were not shown because too many files have changed in this diff.