drethereum 2025-07-17 14:49:14 -06:00
parent 1ee8767455
commit ae9dfd3305
36 changed files with 3 additions and 4622 deletions

View File

@@ -1,81 +0,0 @@
chain,short_coin_type
ailayer,0x0a59
ancient8,0xaaaa
Aptos,0x027d
Arbitrum,0x2329
Aurora,0x0a0a
Avalanche,0x2328
b2,0x00df
Base,0x2105
bb,0x1771
Beam,0x0504
Berachain,0x38de
bevm2,0x2ced
Bitcoin,0x0000
Bitlayer,0x10c5
Blast,0x1331
BNB Chain,0x02ca
Bob,0xed88
Celo,0xce10
ckb,0x0135
Coinweb,0x08ae
Conflux,0x01f7
Core,0x045c
corn,0x6f40
Crypto Chain,0x018a
duck,0x15a9
EOS,0x00c2
Ethereum,0x003c
Evmos,0x11bc
exsat,0x1c20
Fantom,0x03ef
Flow,0x021b
Gnosis,0x02bc
Harmony,0x03ff
hash,0x03ee
hemi,0xa867
inevm,0x09dd
iotex,0x1251
Kaia,0x2019
Kroma,0x00ff
Linea,0xe708
Lisk,0x046f
Manta,0x0263
Mantle,0x1388
Map,0x58f8
Merlin,0x6868
Metis,0x0440
Mode,0x868b
Moonbeam,0x0504
Moonriver,0x0505
Morph,0x0b02
Nautilus,0x56ce
Near,0x018d
Neox,0xba93
opBNB,0x00cc
Optimism,0x0266
Polygon POS,0x03c6
Polygon zkEVM,0x044d
rsk,0x001e
Scroll,0x2750
Sei,0x0531
SKALE Calypso,0x6c62
SKALE Europa,0x9296
SKALE Nebula,0xb4b1
Solana,0x01f5
Soneium,0x074c
Sonic,0x0092
Sui,0x0310
Taiko,0x28c5
Taker,0x0465
Terra,0x014a
Thorchain,0x03a3
Thunder,0x006c
Ton,0x025f
Tron,0x00c3
xlayer,0x00c4
zeta,0x1b58
zkFair,0xa70e
zklink,0x5cc4
zircuit,0xbf04
zkSync Era,0x0324
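
Note: the hex short_coin_type values above identify destination chains; the meson transfer model later in this diff (presumably the consumer of this seed) resolves them by comparing the 2-byte chain id pulled from the event topic against the last four hex characters of short_coin_type. A minimal, illustrative Snowflake/dbt sketch of that lookup, using the seed ref name that appears in the model below (the literal '2105' is simply Base's 0x2105 from this table):
-- Illustrative only: resolve a 2-byte Meson chain id to a chain name via this seed
SELECT
s.chain AS destination_chain
FROM
{{ ref('silver_bridge__meson_chain_id_seed') }} s
WHERE
RIGHT(s.short_coin_type, 4) = '2105' -- 0x2105 above -> Base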

View File

@@ -1,102 +0,0 @@
destination_chain,standard_destination_chain
acala,acala
algorand,algorand
aptos,aptos
arbitrum,arbitrum
arbitrum nova,arbitrum nova
arbitrum one,arbitrum
archway,archway
astar,astar
aurora,aurora
aurora mainnet,aurora
avalanche,avalanche
avalanche c-chain,avalanche
base,base
bnb,bsc
bnb chain,bsc
bnb smart chain mainnet,bsc
boba bnb mainnet,boba
boba network,boba
bsc,bsc
canto,canto
carbon,carbon
celo,celo
celo mainnet,celo
coinweb,coinweb
conflux,conflux
conflux espace,conflux
core,core
coredao,core
core blockchain,core
crab network,crab
crescent,crescent
cronos mainnet,cronos
crypto chain,crypto
dfk chain,dfk
defi kingdoms,dfk
dogechain mainnet,dogechain
eos,eos
ethereum,ethereum
ethereum mainnet,ethereum
evmos,evmos
fantom,fantom
fantom opera,fantom
filecoin,filecoin
fuse,fuse
gnosis,gnosis
harmony mainnet shard 0,harmony
huobi eco chain mainnet,huobi eco
injective,injective
juno,juno
karura,karura
kava,kava
klaytn,klaytn
klaytn mainnet cypress,klaytn
kujira,kujira
linea,linea
manta,manta
mantle,mantle
metis,metis
metis andromeda mainnet,metis
moonbeam,moonbeam
moonriver,moonriver
nautilus,nautilus
near,near
neutron,neutron
oasis,oasis
okxchain mainnet,okxchain
ontology mainnet,ontology
op mainnet,optimism
opbnb,opbnb
optimism,optimism
osmosis,osmosis
polygon,polygon
polygon mainnet,polygon
polygon pos,polygon
polygon zkevm,polygon zkevm
ronin,ronin
scroll,scroll
secret-snip,secret
sei,sei
skale europa,skale europa
skale nebula,skale nebula
solana,solana
stargaze,stargaze
starknet,starknet
sui,sui
telos evm mainnet,telos
terra,terra
terra-2,terra2
terra2,terra2
tezos,tezos
tron,tron
umee,umee
waves,waves
xpla,xpla
xrpl,xrpl
zkfair,zkfair
zksync era,zksync era
zksync era mainnet,zksync era
zksync lite,zksync lite
zora,zora
zzz,zzz

View File

@@ -1,117 +0,0 @@
chain_name,chain_id
Abstract,324
Animechain,372
Ape,312
Aptos,108
Arbitrum,110
Arbitrum Nova,175
Astar zkEVM,257
Avalanche,106
BSC,102
BOB,279
Bahamut,363
Base,184
Beam,198
Berachain,362
Bevm,317
Bitlayer,314
Blast,243
Bouncebit,293
Canto,159
Celo,125
Codex,323
Concrete,366
Conflux eSpace,212
CoreDAO,153
Corn,331
Cronos EVM,359
Cronos zkEVM,360
Cyber,283
DFK,115
DM2 Verse,315
DOS Chain,149
Degen,267
Dexalot,118
EDU Chain,328
Flow EVM,336
Ethereum,101
Etherlink,292
Fantom,112
Flare,295
Fraxtal,255
Fuse,138
Glue,342
Gnosis,145
Goat,361
Gravity,294
Gunz,371
Harmony,116
Hedera,316
Hemi,329
Homeverse,265
Horizen EON,215
HyperEVM,367
Ink,339
Japan Open Chain,285
Kaia,150
Kava,177
Lens,373
Lightlink,309
Linea,183
Lisk,321
Loot,197
Lyra,311
Manta,217
Mantle,181
Merlin,266
Meter,176
Metis,151
Mode,260
Moonbeam,126
Moonriver,167
Morph,322
Aurora,211
Nibiru,369
OKX,155
Optimism,111
Orderly,213
Otherworld Space,341
Peaq,302
Plume,370
Polygon,109
Polygon zkEVM,158
Rari Chain,235
Reya,313
Rootstock,333
Sanko,278
Scroll,214
Sei,280
Shimmer,230
Skale,273
Soneium,340
Sonic Mainnet,332
Sophon,334
Story,364
Subtensor EVM,374
Superposition,327
Swell,335
Taiko,290
TelosEVM,199
Tenet,173
Tiltyard,238
Unichain,320
Vana,330
Viction,196
Worldchain,319
XChain,291
XDC,365
XLayer,274
XPLA,216
Xai,236
Zircuit,303
Zora,195
inEVM,234
opBNB,202
re.al,237
zkLink,301
zkSync Era,165

View File

@@ -101,6 +101,8 @@ models:
+enabled: false
bridge:
+enabled: true
dex:
+enabled: true
balances_package:
+enabled: true
scores_package:

View File

@@ -1,27 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta ={ 'database_tags':{ 'table':{ 'PROTOCOL': 'SUSHI, BITFLUX, GLYPH, COREX',
'PURPOSE': 'DEX, LIQUIDITY, POOLS, LP, SWAPS',}}},
tags = ['gold','defi','dex','curated']
) }}
SELECT
block_number AS creation_block,
block_timestamp AS creation_time,
tx_hash AS creation_tx,
platform,
contract_address AS factory_address,
pool_address,
pool_name,
tokens,
symbols,
decimals,
{{ dbt_utils.generate_surrogate_key(
['pool_address']
) }} AS dim_dex_liquidity_pools_id,
inserted_timestamp,
modified_timestamp
FROM
{{ ref('silver_dex__complete_dex_liquidity_pools') }}

View File

@@ -1,54 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta ={
'database_tags':{
'table':{
'PROTOCOL': 'MESON, STARGATE, SYMBIOSIS, GASZIP, COREBRIDGE, LAYERZERO',
'PURPOSE': 'BRIDGE'
} } },
tags = ['gold','defi','bridge','curated','ez']
) }}
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
sender,
receiver,
destination_chain_receiver,
COALESCE(
c.standardized_name,
b.destination_chain
) AS destination_chain,
destination_chain_id,
token_address,
token_symbol,
amount_unadj,
amount,
ROUND(
CASE
WHEN amount_usd < 1e+15 THEN amount_usd
ELSE NULL
END,
2
) AS amount_usd,
token_is_verified,
{{ dbt_utils.generate_surrogate_key(
['_id']
) }} AS ez_bridge_activity_id,
inserted_timestamp,
modified_timestamp
FROM
{{ ref('silver_bridge__complete_bridge_activity') }}
b
LEFT JOIN {{ ref('silver_bridge__standard_chain_seed') }} C
ON b.destination_chain = C.variation

View File

@@ -1,66 +0,0 @@
{{ config(
materialized = 'view',
persist_docs ={ "relation": true,
"columns": true },
meta ={ 'database_tags':{ 'table':{ 'PROTOCOL': 'SUSHI, BITFLUX, GLYPH, COREX',
'PURPOSE': 'DEX, SWAPS' }}},
tags = ['gold','defi','dex','curated','ez']
) }}
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
pool_name,
event_name,
amount_in_unadj,
amount_in,
ROUND(
CASE
WHEN (token_in <> '0x191e94fa59739e188dce837f7f6978d84727ad01' OR NOT token_in_is_verified)
AND (
amount_out_usd IS NULL
OR ABS((amount_in_usd - amount_out_usd) / NULLIF(amount_out_usd, 0)) > 0.75
OR ABS((amount_in_usd - amount_out_usd) / NULLIF(amount_in_usd, 0)) > 0.75
) THEN NULL
ELSE amount_in_usd
END,
2
) AS amount_in_usd,
amount_out_unadj,
amount_out,
ROUND(
CASE
WHEN (token_out <> '0x191e94fa59739e188dce837f7f6978d84727ad01' OR NOT token_out_is_verified)
AND (
amount_in_usd IS NULL
OR ABS((amount_out_usd - amount_in_usd) / NULLIF(amount_in_usd, 0)) > 0.75
OR ABS((amount_out_usd - amount_in_usd) / NULLIF(amount_out_usd, 0)) > 0.75
) THEN NULL
ELSE amount_out_usd
END,
2
) AS amount_out_usd,
sender,
tx_to,
event_index,
platform,
protocol,
version as protocol_version,
token_in,
token_in_is_verified,
token_out,
token_out_is_verified,
symbol_in,
symbol_out,
{{ dbt_utils.generate_surrogate_key(
['tx_hash','event_index']
) }} AS ez_dex_swaps_id,
inserted_timestamp,
modified_timestamp
FROM
{{ ref('silver_dex__complete_dex_swaps') }}
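
A quick worked example of the 75% sanity guard above (numbers are illustrative, not from data): for a token subject to the check, amount_in_usd = 100 and amount_out_usd = 20 gives ABS((100 - 20) / 20) = 4.0 > 0.75, so the reported amount_in_usd is nulled; with 100 vs 90 the ratio is roughly 0.11 and the value is kept. A NULL on the opposite leg also triggers the null, since a one-sided price cannot be cross-checked.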

View File

@@ -1,78 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH unwrap_token AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
contract_address,
tx_hash,
event_index,
'UnwrapToken' AS event_name,
'core-bridge' AS platform,
'v1' AS version,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS local_token,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS remote_token,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [2] :: STRING)) AS remote_chain_id,
CONCAT('0x', SUBSTR(segmented_data [3] :: STRING, 25, 40)) AS to_address,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [4] :: STRING)) AS amount_unadj,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0xa4218e1f39da4aadac971066458db56e901bcbde'
AND topic_0 = '0x3b661011d9e0ff8f0dc432bac4ed79eabf70cf52596ed9de985810ef36689e9e'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
contract_address AS bridge_address,
contract_address,
event_name,
platform,
'v1' AS version,
origin_from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
remote_chain_id :: STRING AS destination_chain_id,
s.chain_name AS destination_chain,
local_token AS token_address,
amount_unadj,
_log_id,
_inserted_timestamp
FROM
unwrap_token
LEFT JOIN {{ ref('silver_bridge__stargate_chain_id_seed') }}
s
ON unwrap_token.remote_chain_id :: STRING = s.chain_id :: STRING
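
A note on the decoding pattern above, which recurs throughout the curated models in this commit: the un-indexed event DATA is a single hex string, so stripping the '0x' and slicing into 64-character words yields one word per ABI slot; addresses sit in the last 40 characters of a word, and integers go through the project's utils.udf_hex_to_int UDF. A self-contained, illustrative sketch (the payload is fabricated: word 0 carries the corebridge contract address from above, word 1 carries 10^18):
SELECT
regexp_substr_all(SUBSTR(d.data, 3, len(d.data)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(segmented_data [0] :: STRING, 25, 40)) AS address_slot,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [1] :: STRING)) AS amount_slot
FROM
(
SELECT
'0x' || LPAD('a4218e1f39da4aadac971066458db56e901bcbde', 64, '0') -- address padded to one 32-byte word
|| LPAD('de0b6b3a7640000', 64, '0') -- 0x0de0b6b3a7640000 = 10^18
AS data
) d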

View File

@@ -1,70 +0,0 @@
version: 2
models:
- name: silver_bridge__corebridge_unwraptoken
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- not_null

View File

@@ -1,156 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH senddeposits AS (
-- gaszip lz v2 event (only 1 per tx)
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS to_address,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [2] :: STRING)) AS VALUE,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [3] :: STRING)) AS fee,
CONCAT('0x', SUBSTR(segmented_data [4] :: STRING, 25, 40)) AS from_address,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0x26da582889f59eaae9da1f063be0140cd93e6a4f' -- gaszip l0 v2
AND topic_0 = '0xa22a487af6300dc77db439586e8ce7028fd7f1d734efd33b287bc1e2af4cd162' -- senddeposits
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
packetsent AS (
-- pulls lz packetsent events from gaszip txs only (1 packet per chain, may have >1 per tx)
SELECT
tx_hash,
event_index,
DATA,
CONCAT('0x', SUBSTR(DATA, 155, 40)) AS send_lib,
utils.udf_hex_to_int(SUBSTR(DATA, 261, 16)) AS nonce,
utils.udf_hex_to_int(SUBSTR(DATA, 277, 8)) AS srcEid,
CONCAT('0x', SUBSTR(DATA, 258 + 18 + 8 + 25, 40)) AS src_app_address,
utils.udf_hex_to_int(SUBSTR(DATA, 258 + 18 + 8 + 64 + 1, 8)) AS dstEid,
CONCAT('0x', SUBSTR(DATA, 258 + 18 + 8 + 64 + 8 + 25, 40)) AS dst_app_address,
TRY_TO_NUMBER(utils.udf_hex_to_int(SUBSTR(DATA, 630 + 1, 32))) AS native_amount,
CONCAT('0x', SUBSTR(DATA, 630 + 1 + 32 + 24, 40)) AS receiver,
ROW_NUMBER() over (
PARTITION BY tx_hash
ORDER BY
event_index ASC
) event_rank
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0x1a44076050125825900e736c501f859c50fe728c' -- l0 endpoint v2
AND topic_0 = '0x1ab700d4ced0c005b164c0f789fd09fcbb0156d4c2041b8a3bfbcd961cd1567f' -- packetsent
AND tx_hash IN (
SELECT
tx_hash
FROM
senddeposits
)
AND tx_succeeded
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
nativetransfers AS (
-- pulls native transfers in gaszip lz v2 bridging
SELECT
tx_hash,
TRY_TO_NUMBER(amount_precise_raw) AS amount_precise_raw,
'0x40375c92d9faf44d2f9db9bd9ba41a3317a2404f' AS token_address,
-- wrapped native
ROW_NUMBER() over (
PARTITION BY tx_hash
ORDER BY
trace_index ASC
) transfer_rank
FROM
{{ ref('core__ez_native_transfers') }}
WHERE
from_address = '0x1a44076050125825900e736c501f859c50fe728c' -- l0 endpoint v2
AND to_address = '0x0bcac336466ef7f1e0b5c184aab2867c108331af' -- SendUln302
AND tx_hash IN (
SELECT
tx_hash
FROM
senddeposits
)
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
s.tx_hash,
p.event_index,
-- joins on packetsent event index instead of senddeposits for uniqueness
'SendDeposit' AS event_name,
'gaszip-lz-v2' AS platform,
'v2' AS version,
contract_address AS bridge_address,
contract_address,
from_address AS sender,
receiver,
receiver AS destination_chain_receiver,
nonce,
dstEid AS destination_chain_id,
chain AS destination_chain,
amount_precise_raw AS amount_unadj,
token_address,
CONCAT(
s.tx_hash :: STRING,
'-',
p.event_index :: STRING
) AS _log_id,
_inserted_timestamp
FROM
senddeposits s
INNER JOIN packetsent p
ON s.tx_hash = p.tx_hash
LEFT JOIN nativetransfers t
ON p.tx_hash = t.tx_hash
AND event_rank = transfer_rank
LEFT JOIN {{ ref('silver_bridge__layerzero_v2_bridge_seed') }}
ON dstEid = eid

View File

@ -1,70 +0,0 @@
version: 2
models:
- name: silver_bridge__gaszip_lz_sentdeposits
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null
- name: EVENT_NAME
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- not_null

View File

@@ -1,189 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH token_transfers AS (
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
contract_address,
from_address,
to_address,
raw_amount,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_token_transfers') }}
WHERE
from_address <> '0x0000000000000000000000000000000000000000'
AND to_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
native_transfers AS (
SELECT
et.block_number,
et.block_timestamp,
et.tx_hash,
tx.from_address AS origin_from_address,
tx.to_address AS origin_to_address,
tx.origin_function_signature,
et.from_address,
et.to_address,
amount_precise_raw,
et.ez_native_transfers_id AS _call_id,
et.modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_native_transfers') }}
et
INNER JOIN {{ ref('core__fact_transactions') }}
tx
ON et.block_number = tx.block_number
AND et.tx_hash = tx.tx_hash
WHERE
et.to_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
and et.tx_hash not in (select tx_hash from token_transfers)
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
{% endif %}
),
all_transfers AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
'Transfer' AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
raw_amount AS amount_unadj,
contract_address AS token_address,
{{ dbt_utils.generate_surrogate_key(
['_log_id']
) }} AS _id,
_inserted_timestamp
FROM
token_transfers
UNION ALL
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
NULL AS event_index,
NULL AS event_name,
to_address AS bridge_address,
from_address AS sender,
to_address AS receiver,
amount_precise_raw AS amount_unadj,
LOWER('0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c') AS token_address,
{{ dbt_utils.generate_surrogate_key(
['_call_id']
) }} AS _id,
_inserted_timestamp
FROM
native_transfers
),
dst_info AS (
SELECT
block_number,
tx_hash,
topics [1] :: STRING AS encoded_data,
SUBSTR(RIGHT(encoded_data, 12), 1, 4) AS destination_chain_id,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0x25ab3efd52e6470681ce037cd546dc60726948d3'
AND topic_0 IN (
'0x5ce4019f772fda6cb703b26bce3ec3006eb36b73f1d3a0eb441213317d9f5e9d',
'0x8d92c805c252261fcfff21ee60740eb8a38922469a7e6ee396976d57c22fc1c9'
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '16 hours'
FROM
{{ this }}
)
{% endif %}
)
SELECT
t.block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
t.tx_hash,
event_index,
event_name,
'meson' AS platform,
bridge_address,
sender,
receiver,
CASE
WHEN origin_from_address = '0x0000000000000000000000000000000000000000' THEN sender
ELSE origin_from_address
END AS destination_chain_receiver,
amount_unadj,
destination_chain_id,
COALESCE(LOWER(chain), 'other') AS destination_chain,
token_address,
_id,
t._inserted_timestamp
FROM
all_transfers t
INNER JOIN dst_info d
ON t.tx_hash = d.tx_hash
AND t.block_number = d.block_number
LEFT JOIN {{ ref('silver_bridge__meson_chain_id_seed') }}
s
ON d.destination_chain_id :: STRING = RIGHT(
s.short_coin_type,
4
) :: STRING
WHERE
origin_to_address IS NOT NULL qualify (ROW_NUMBER() over (PARTITION BY _id
ORDER BY
t._inserted_timestamp DESC)) = 1

View File

@ -1,50 +0,0 @@
version: 2
models:
- name: silver_bridge__meson_transfer
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_ADDRESS
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: _INSERTED_TIMESTAMP
tests:
- not_null

View File

@@ -1,572 +0,0 @@
-- depends_on: {{ ref('silver__complete_token_prices') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = ['block_number','platform','version'],
cluster_by = ['block_timestamp::DATE','platform'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_hash, origin_from_address, origin_to_address, origin_function_signature, bridge_address, sender, receiver, destination_chain_receiver, destination_chain_id, destination_chain, token_address, token_symbol), SUBSTRING(origin_function_signature, bridge_address, sender, receiver, destination_chain_receiver, destination_chain, token_address, token_symbol)",
tags = ['silver_bridge','defi','bridge','curated','heal']
) }}
WITH corebridge AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
destination_chain,
token_address,
NULL AS token_symbol,
amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__corebridge_unwraptoken') }}
{% if is_incremental() and 'corebridge' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
gaszip_lz AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
destination_chain,
token_address,
NULL AS token_symbol,
amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__gaszip_lz_sentdeposits') }}
{% if is_incremental() and 'gaszip_lz' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
layerzero_v2 AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
destination_chain,
token_address,
NULL AS token_symbol,
amount_unadj,
_log_id AS _id,
inserted_timestamp AS _inserted_timestamp
FROM
{{ ref('silver_bridge__layerzero_v2') }}
{% if is_incremental() and 'layerzero_v2' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
meson AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
destination_chain,
token_address,
NULL AS token_symbol,
amount_unadj,
_id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__meson_transfer') }}
{% if is_incremental() and 'meson' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
stargate_v2 AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
destination_chain,
token_address,
NULL AS token_symbol,
amount_unadj,
_log_id AS _id,
inserted_timestamp AS _inserted_timestamp
FROM
{{ ref('silver_bridge__stargate_v2') }}
{% if is_incremental() and 'stargate_v2' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
symbiosis AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
'v1' AS version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id :: STRING AS destination_chain_id,
NULL AS destination_chain,
token_address,
NULL AS token_symbol,
amount_unadj,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_bridge__symbiosis_synthesizerequest') }}
{% if is_incremental() and 'symbiosis' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
all_protocols AS (
SELECT
*
FROM
corebridge
UNION ALL
SELECT
*
FROM
gaszip_lz
UNION ALL
SELECT
*
FROM
layerzero_v2
UNION ALL
SELECT
*
FROM
meson
UNION ALL
SELECT
*
FROM
stargate_v2
UNION ALL
SELECT
*
FROM
symbiosis
),
complete_bridge_activity AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
CASE
WHEN CONCAT(
platform,
'-',
version
) IN (
'meson-v1',
'gaszip-lz-v2-v2',
'core-bridge-v1',
'layerzero-v2',
'stargate-v2'
) THEN destination_chain_id :: STRING
WHEN d.chain_id IS NULL THEN destination_chain_id :: STRING
ELSE d.chain_id :: STRING
END AS destination_chain_id,
CASE
WHEN CONCAT(
platform,
'-',
version
) IN (
'meson-v1',
'gaszip-lz-v2-v2',
'core-bridge-v1',
'layerzero-v2',
'stargate-v2'
) THEN LOWER(destination_chain)
WHEN d.chain IS NULL THEN LOWER(destination_chain)
ELSE LOWER(
d.chain
)
END AS destination_chain,
b.token_address,
CASE
WHEN platform = 'axelar' THEN COALESCE(
C.token_symbol,
b.token_symbol
)
ELSE C.token_symbol
END AS token_symbol,
C.token_decimals AS token_decimals,
amount_unadj,
CASE
WHEN C.token_decimals IS NOT NULL THEN (amount_unadj / pow(10, C.token_decimals))
ELSE amount_unadj
END AS amount,
CASE
WHEN C.token_decimals IS NOT NULL THEN ROUND(
amount * p.price,
2
)
ELSE NULL
END AS amount_usd,
p.is_verified as token_is_verified,
_id,
b._inserted_timestamp
FROM
all_protocols b
LEFT JOIN {{ ref('silver__contracts') }} C
ON b.token_address = C.contract_address
LEFT JOIN {{ ref('price__ez_prices_hourly') }}
p
ON b.token_address = p.token_address
AND DATE_TRUNC(
'hour',
block_timestamp
) = p.hour
LEFT JOIN {{ source(
'external_gold_defillama',
'dim_chains'
) }}
d
ON d.chain_id :: STRING = b.destination_chain_id :: STRING
OR LOWER(
d.chain
) = LOWER(
b.destination_chain
)
),
{% if is_incremental() and var(
'HEAL_MODEL'
) %}
heal_model AS (
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id,
destination_chain,
t0.token_address,
C.token_symbol AS token_symbol,
C.token_decimals AS token_decimals,
amount_unadj,
CASE
WHEN C.token_decimals IS NOT NULL THEN (amount_unadj / pow(10, C.token_decimals))
ELSE amount_unadj
END AS amount_heal,
CASE
WHEN C.token_decimals IS NOT NULL THEN amount_heal * p.price
ELSE NULL
END AS amount_usd_heal,
p.is_verified as token_is_verified,
_id,
t0._inserted_timestamp
FROM
{{ this }}
t0
LEFT JOIN {{ ref('silver__contracts') }} C
ON t0.token_address = C.contract_address
LEFT JOIN {{ ref('price__ez_prices_hourly') }}
p
ON t0.token_address = p.token_address
AND DATE_TRUNC(
'hour',
block_timestamp
) = p.hour
WHERE
CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t1.block_number,
'-',
t1.platform,
'-',
t1.version
)
FROM
{{ this }}
t1
WHERE
t1.token_decimals IS NULL
AND t1._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t1.token_address)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t2.block_number,
'-',
t2.platform,
'-',
t2.version
)
FROM
{{ this }}
t2
WHERE
t2.amount_usd IS NULL
AND t2._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__complete_token_prices') }}
p
WHERE
p._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND p.price IS NOT NULL
AND p.token_address = t2.token_address
AND p.hour = DATE_TRUNC(
'hour',
t2.block_timestamp
)
)
GROUP BY
1
)
),
{% endif %}
FINAL AS (
SELECT
*
FROM
complete_bridge_activity
{% if is_incremental() and var(
'HEAL_MODEL'
) %}
UNION ALL
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id,
destination_chain,
token_address,
token_symbol,
token_decimals,
amount_unadj,
amount_heal AS amount,
amount_usd_heal AS amount_usd,
token_is_verified,
_id,
_inserted_timestamp
FROM
heal_model
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_from_address,
origin_to_address,
origin_function_signature,
tx_hash,
event_index,
bridge_address,
event_name,
platform,
version,
sender,
receiver,
destination_chain_receiver,
destination_chain_id,
destination_chain,
token_address,
token_symbol,
token_decimals,
amount_unadj,
amount,
amount_usd,
ifnull(token_is_verified, false) as token_is_verified,
_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['_id']
) }} AS complete_bridge_activity_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL
WHERE
destination_chain <> 'core' qualify (ROW_NUMBER() over (PARTITION BY _id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@@ -1,83 +0,0 @@
version: 2
models:
- name: silver_bridge__complete_bridge_activity
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- name: ORIGIN_FUNCTION_SIGNATURE
tests:
- not_null
- name: ORIGIN_FROM_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: ORIGIN_TO_ADDRESS
tests:
- not_null:
where: PLATFORM NOT IN ('stargate')
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: EVENT_INDEX
tests:
- not_null:
where: VERSION <> 'v1-native' AND PLATFORM NOT IN ('wormhole','meson')
- name: EVENT_NAME
tests:
- not_null:
where: VERSION <> 'v1-native' AND PLATFORM NOT IN ('wormhole','meson')
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN
tests:
- not_null
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: PLATFORM
tests:
- not_null
- name: VERSION
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: RECEIVER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: TOKEN_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: _INSERTED_TIMESTAMP
tests:
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 3

View File

@@ -1,83 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = "block_number",
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_bridge','defi','bridge','curated']
) }}
WITH base_evt AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
'symbiosis' AS platform,
event_index,
topics [0] :: STRING AS topic_0,
event_name,
TRY_TO_NUMBER(
decoded_log :"amount" :: STRING
) AS amount_unadj,
TRY_TO_NUMBER(
decoded_log :"chainID" :: STRING
) AS chainID,
decoded_log :"from" :: STRING AS from_address,
decoded_log :"id" :: STRING AS id,
decoded_log :"revertableAddress" :: STRING AS revertableAddress,
decoded_log :"to" :: STRING AS to_address,
decoded_log :"token" :: STRING AS token,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__ez_decoded_event_logs') }}
WHERE
topics [0] :: STRING = '0x31325fe0a1a2e6a5b1e41572156ba5b4e94f0fae7e7f63ec21e9b5ce1e4b3eab'
AND contract_address IN (
'0x292fc50e4eb66c3f6514b9e402dbc25961824d62'
)
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
origin_function_signature,
origin_from_address,
origin_to_address,
tx_hash,
event_index,
topic_0,
event_name,
contract_address AS bridge_address,
platform,
from_address AS sender,
to_address AS receiver,
receiver AS destination_chain_receiver,
amount_unadj,
chainID AS destination_chain_id,
id,
revertableAddress AS revertable_address,
token AS token_address,
_log_id,
_inserted_timestamp
FROM
base_evt

View File

@@ -1,50 +0,0 @@
version: 2
models:
- name: silver_bridge__symbiosis_synthesizerequest
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: TX_HASH
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: TOKEN_ADDRESS
tests:
- not_null
- name: SENDER
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: DESTINATION_CHAIN_RECEIVER
tests:
- not_null
- name: BRIDGE_ADDRESS
tests:
- not_null
- dbt_expectations.expect_column_values_to_match_regex:
regex: 0[xX][0-9a-fA-F]+
- name: AMOUNT_UNADJ
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- DECIMAL
- FLOAT
- NUMBER
- name: _INSERTED_TIMESTAMP
tests:
- not_null

View File

@@ -1,161 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_traces AS (
SELECT
block_number,
block_timestamp,
tx_hash,
to_address AS pool_address,
to_address AS contract_address,
regexp_substr_all(SUBSTR(input, 11, len(input)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [0] :: STRING)) / 32 AS token_index,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [1] :: STRING)) / 32 AS decimal_index,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [token_index] :: STRING
)
) AS token_number,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [decimal_index] :: STRING
)
) AS decimals_number,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [5] :: STRING)) * pow(
10,
-10
) AS swap_fee,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [6] :: STRING)) * pow(
10,
-10
) AS admin_fee,
-- 50% of swap fee
CONCAT('0x', SUBSTR(segmented_data [7] :: STRING, 25, 40)) AS lp_token,
CONCAT(
'0x',
SUBSTR(
segmented_data [token_index+1] :: STRING,
25,
40
)
) AS token0,
CONCAT(
'0x',
SUBSTR(
segmented_data [token_index+2] :: STRING,
25,
40
)
) AS token1,
CASE
WHEN token_number > 2 THEN CONCAT(
'0x',
SUBSTR(
segmented_data [token_index+3] :: STRING,
25,
40
)
)
ELSE NULL
END AS token2,
CASE
WHEN token_number > 3 THEN CONCAT(
'0x',
SUBSTR(
segmented_data [token_index+4] :: STRING,
25,
40
)
)
ELSE NULL
END AS token3,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [decimal_index+1] :: STRING
)
) AS decimal0,
TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [decimal_index+2] :: STRING
)
) AS decimal1,
CASE
WHEN decimals_number > 2 THEN TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [decimal_index+3] :: STRING
)
)
ELSE NULL
END AS decimal2,
CASE
WHEN decimals_number > 3 THEN TRY_TO_NUMBER(
utils.udf_hex_to_int(
segmented_data [decimal_index+4] :: STRING
)
)
ELSE NULL
END AS decimal3,
utils.udf_hex_to_string(
segmented_data [array_size(segmented_data)-3] :: STRING
) AS lp_name,
utils.udf_hex_to_string(
segmented_data [array_size(segmented_data)-1] :: STRING
) AS lp_symbol,
CONCAT(
tx_hash :: STRING,
'-',
trace_index :: STRING -- using trace_index instead of event_index
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_traces') }}
WHERE
1 = 1
AND origin_function_signature = '0xb28cb6dc'
AND LEFT(
input,
10
) = '0xb28cb6dc'
AND trace_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
pool_address,
token0,
token1,
token2,
token3,
decimal0,
decimal1,
decimal2,
decimal3,
lp_name,
lp_symbol,
swap_fee,
admin_fee,
lp_token,
_log_id,
_inserted_timestamp
FROM
pool_traces qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1
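
For readers following the offset arithmetic above: in ABI-encoded calldata the head word of a dynamic array stores its byte offset, so dividing segmented_data [0] by 32 converts that offset into a word index. As an illustrative (assumed, not observed) example: if segmented_data [0] decodes to 256, then token_index = 256 / 32 = 8, segmented_data [8] holds the token count, and the token addresses follow at words 9, 10, and so on; decimal_index locates the decimals array the same way.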

View File

@@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__bitflux_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null

View File

@@ -1,116 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH bitflux_pools AS (
SELECT
pool_address,
token0,
token1,
token2,
token3,
decimal0,
decimal1,
decimal2,
decimal3
FROM
{{ ref('silver_dex__bitflux_pools') }}
),
base_swaps AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
l.contract_address,
l.origin_function_signature,
l.origin_from_address,
l.origin_to_address,
event_index,
COALESCE(
p1.pool_address,
p2.pool_address
) AS pool_address,
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS buyer,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [0] :: STRING)) AS tokensSold,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [1] :: STRING)) AS tokensBought,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [2] :: STRING)) AS soldId,
TRY_TO_NUMBER(utils.udf_hex_to_int(segmented_data [3] :: STRING)) AS boughtId,
p1.token0,
p1.token1,
p1.token2,
p1.token3,
CASE
WHEN boughtId = 0 THEN p1.token0
WHEN boughtId = 1 THEN p1.token1
WHEN boughtId = 2 THEN p1.token2
WHEN boughtId = 3 THEN p1.token3
ELSE NULL
END AS token_out,
CASE
WHEN soldId = 0 THEN p2.token0
WHEN soldId = 1 THEN p2.token1
WHEN soldId = 2 THEN p2.token2
WHEN soldId = 3 THEN p2.token3
ELSE NULL
END AS token_in,
tokensSold AS amount_in_unadj,
tokensBought AS amount_out_unadj,
CONCAT(
l.tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN bitflux_pools p1
ON l.contract_address = p1.pool_address
INNER JOIN bitflux_pools p2
ON l.contract_address = p2.pool_address
WHERE
topic_0 = '0xc6c1e0630dbe9130cc068028486c0d118ddcea348550819defd5cb8c257f8a38'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
pool_address,
origin_function_signature,
origin_from_address,
origin_to_address,
buyer AS recipient,
buyer AS sender,
buyer AS tx_to,
'TokenSwap' AS event_name,
event_index,
token0,
token1,
token2,
token3,
token_in,
token_out,
amount_in_unadj,
amount_out_unadj,
_log_id,
_inserted_timestamp
FROM
base_swaps

View File

@@ -1,43 +0,0 @@
version: 2
models:
- name: silver_dex__bitflux_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: POOL_ADDRESS
tests:
- not_null
- name: RECIPIENT
tests:
- not_null
- name: SENDER
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: AMOUNT_IN_UNADJ
tests:
- not_null
- name: AMOUNT_OUT_UNADJ
tests:
- not_null

View File

@@ -1,68 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS token1,
utils.udf_hex_to_int(
's2c',
topic_3
) :: INTEGER AS fee,
utils.udf_hex_to_int(
's2c',
segmented_data [0] :: STRING
) :: INTEGER AS tick_spacing,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0x526190295afb6b8736b14e4b42744fbd95203a3a'
AND topic_0 = '0x783cca1c0412dd0d695e784568c96da2e9c22ff989357a2e8b1d9b2b4e6b7118'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
token0,
token1,
pool_address,
fee,
tick_spacing,
_log_id,
_inserted_timestamp
FROM
pool_creation qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__corex_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null

View File

@@ -1,104 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_data AS (
SELECT
token0,
token1,
fee,
tick_spacing,
pool_address
FROM
{{ ref('silver_dex__corex_pools') }}
),
base_swaps AS (
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
event_index,
origin_function_signature,
origin_from_address,
origin_to_address,
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS sender,
CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS recipient,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
utils.udf_hex_to_int(
's2c',
segmented_data [0] :: STRING
) :: FLOAT AS amount0_unadj,
utils.udf_hex_to_int(
's2c',
segmented_data [1] :: STRING
) :: FLOAT AS amount1_unadj,
utils.udf_hex_to_int(
's2c',
segmented_data [2] :: STRING
) :: FLOAT AS sqrtPriceX96,
utils.udf_hex_to_int(
's2c',
segmented_data [3] :: STRING
) :: FLOAT AS liquidity,
utils.udf_hex_to_int(
's2c',
segmented_data [4] :: STRING
) :: FLOAT AS tick,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }} l
INNER JOIN pool_data p
ON p.pool_address = l.contract_address
WHERE
topic_0 = '0xc42079f94a6350d7e6235f29174924f928cc2ac818eb64fed8004e115fbcca67'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
pool_address,
origin_function_signature,
origin_from_address,
origin_to_address,
recipient,
sender,
fee,
tick,
tick_spacing,
liquidity,
event_index,
token0,
token1,
amount0_unadj,
amount1_unadj,
_log_id,
_inserted_timestamp
FROM
base_swaps
INNER JOIN pool_data
ON pool_data.pool_address = base_swaps.contract_address qualify(ROW_NUMBER() over(PARTITION BY _log_id
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@@ -1,43 +0,0 @@
version: 2
models:
- name: silver_dex__corex_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: POOL_ADDRESS
tests:
- not_null
- name: RECIPIENT
tests:
- not_null
- name: SENDER
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: AMOUNT0_UNADJ
tests:
- not_null
- name: AMOUNT1_UNADJ
tests:
- not_null

View File

@@ -1,155 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH pool_creation AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS token0,
CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS token1,
CONCAT('0x', SUBSTR(DATA, 27, 40)) AS pool_address,
CONCAT(
tx_hash :: STRING,
'-',
event_index :: STRING
) AS _log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
contract_address = '0x74efe55bea4988e7d92d03efd8ddb8bf8b7bd597'
AND topic_0 = '0x91ccaa7a278130b65168c3a0c8d3bcae84cf5e43704342bd3ec0b59e59c036db'
AND tx_succeeded
{% if is_incremental() %}
AND _inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND _inserted_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
initial_info AS (
SELECT
tx_hash,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
utils.udf_hex_to_int('s2c', CONCAT('0x', segmented_data [0] :: STRING)) AS price,
utils.udf_hex_to_int('s2c', CONCAT('0x', segmented_data [1] :: STRING)) AS tick
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topics [0] :: STRING = '0x98636036cb66a9c19a37435efc1e90142190214e8abeb821bdba3f2990dd4c95'
AND tx_hash IN (
SELECT
tx_hash
FROM
pool_creation
)
AND tx_succeeded
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
tick_spacing AS (
SELECT
tx_hash,
contract_address,
utils.udf_hex_to_int(
's2c',
DATA :: STRING
) :: INTEGER AS tick_spacing
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topic_0 = '0x01413b1d5d4c359e9a0daa7909ecda165f6e8c51fe2ff529d74b22a5a7c02645'
AND tx_hash IN (
SELECT
tx_hash
FROM
pool_creation
)
AND tx_succeeded
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
fee AS (
SELECT
tx_hash,
contract_address,
utils.udf_hex_to_int(
's2c',
DATA :: STRING
) :: INTEGER AS fee
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topic_0 = '0x598b9f043c813aa6be3426ca60d1c65d17256312890be5118dab55b0775ebe2a'
AND tx_hash IN (
SELECT
tx_hash
FROM
pool_creation
)
AND tx_succeeded
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
p.tx_hash,
p.contract_address,
event_index,
token0,
token1,
pool_address,
fee,
tick,
tick_spacing,
_log_id,
_inserted_timestamp
FROM
pool_creation p
INNER JOIN initial_info
ON initial_info.contract_address = p.pool_address
INNER JOIN tick_spacing
ON tick_spacing.contract_address = p.pool_address
INNER JOIN fee
ON fee.contract_address = p.pool_address qualify(ROW_NUMBER() over (PARTITION BY pool_address
ORDER BY
_inserted_timestamp DESC)) = 1

View File

@@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__glyph_v4_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null

View File

@@ -1,117 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
l.event_index,
l.origin_function_signature,
l.origin_from_address,
l.origin_to_address,
l.contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS sender,
CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS recipient,
utils.udf_hex_to_int(
's2c',
segmented_data [0] :: STRING
) :: FLOAT AS amount0_unadj,
utils.udf_hex_to_int(
's2c',
segmented_data [1] :: STRING
) :: FLOAT AS amount1_unadj,
utils.udf_hex_to_int(
's2c',
segmented_data [2] :: STRING
) :: FLOAT AS sqrtPriceX96,
utils.udf_hex_to_int(
's2c',
segmented_data [3] :: STRING
) :: FLOAT AS liquidity,
utils.udf_hex_to_int(
's2c',
segmented_data [4] :: STRING
) :: FLOAT AS tick,
token0,
token1,
pool_address,
tick_spacing,
fee,
CONCAT(
l.tx_hash,
'-',
l.event_index
) AS _log_id,
l.modified_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN {{ ref('silver_dex__glyph_v4_pools') }}
p
ON p.pool_address = l.contract_address
WHERE
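-- Uniswap V3-style Swap(sender, recipient, amount0, amount1, sqrtPriceX96, liquidity, tick) event signature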
topic_0 = '0xc42079f94a6350d7e6235f29174924f928cc2ac818eb64fed8004e115fbcca67'
AND tx_succeeded
{% if is_incremental() %}
AND l.modified_timestamp >= (
SELECT
MAX(modified_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND l.modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
pool_address,
recipient,
recipient AS tx_to,
sender,
fee,
tick,
tick_spacing,
liquidity,
token0,
token1,
amount0_unadj,
amount1_unadj,
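-- V3 sign convention: a positive amount flows into the pool (token in), a negative amount flows out (token out)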
CASE
WHEN amount0_unadj > 0 THEN ABS(amount0_unadj)
ELSE ABS(amount1_unadj)
END AS amount_in_unadj,
CASE
WHEN amount0_unadj < 0 THEN ABS(amount0_unadj)
ELSE ABS(amount1_unadj)
END AS amount_out_unadj,
CASE
WHEN amount0_unadj > 0 THEN token0
ELSE token1
END AS token_in,
CASE
WHEN amount0_unadj < 0 THEN token0
ELSE token1
END AS token_out,
_log_id,
modified_timestamp
FROM
swaps_base qualify(ROW_NUMBER() over(PARTITION BY _log_id
ORDER BY
modified_timestamp DESC)) = 1


@ -1,43 +0,0 @@
version: 2
models:
- name: silver_dex__glyph_v4_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: POOL_ADDRESS
tests:
- not_null
- name: RECIPIENT
tests:
- not_null
- name: SENDER
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: AMOUNT0_UNADJ
tests:
- not_null
- name: AMOUNT1_UNADJ
tests:
- not_null


@ -1,911 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = ['block_number','platform','version'],
cluster_by = ['block_timestamp::DATE','platform'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_hash, contract_address, pool_address, pool_name, tokens, symbols), SUBSTRING(pool_address, pool_name, tokens, symbols)",
tags = ['silver_dex','defi','dex','curated','heal']
) }}
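-- Union of the protocol-specific pool models, enriched with token metadata from silver__contracts; an optional heal pass backfills names, symbols, and decimals once metadata becomes available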
WITH contracts AS (
SELECT
contract_address,
token_symbol,
token_decimals,
_inserted_timestamp
FROM
{{ ref('silver__contracts') }}
),
bitflux AS (
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
pool_address,
NULL AS pool_name,
NULL AS fee,
NULL AS tick_spacing,
token0,
token1,
token2,
token3,
NULL AS token4,
NULL AS token5,
NULL AS token6,
NULL AS token7,
'bitflux' AS platform,
'v1' AS version,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_dex__bitflux_pools') }}
{% if is_incremental() and 'bitflux' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
corex AS (
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
pool_address,
NULL AS pool_name,
fee,
tick_spacing,
token0,
token1,
NULL AS token2,
NULL AS token3,
NULL AS token4,
NULL AS token5,
NULL AS token6,
NULL AS token7,
'corex' AS platform,
'v1' AS version,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_dex__corex_pools') }}
{% if is_incremental() and 'corex' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
glyph_v4 AS (
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
pool_address,
NULL AS pool_name,
fee,
tick_spacing,
token0,
token1,
NULL AS token2,
NULL AS token3,
NULL AS token4,
NULL AS token5,
NULL AS token6,
NULL AS token7,
'glyph-v4' AS platform,
'v4' AS version,
_log_id AS _id,
_inserted_timestamp
FROM
{{ ref('silver_dex__glyph_v4_pools') }}
{% if is_incremental() and 'glyph_v4' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
sushi_v3 AS (
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
pool_address,
NULL AS pool_name,
fee,
tick_spacing,
token0,
token1,
NULL AS token2,
NULL AS token3,
NULL AS token4,
NULL AS token5,
NULL AS token6,
NULL AS token7,
'sushi-v3' AS platform,
'v3' AS version,
_log_id AS _id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('silver_dex__sushi_v3_pools') }}
{% if is_incremental() and 'sushi_v3' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
all_pools AS (
SELECT
*
FROM
bitflux
UNION ALL
SELECT
*
FROM
corex
UNION ALL
SELECT
*
FROM
glyph_v4
UNION ALL
SELECT
*
FROM
sushi_v3
),
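-- Derive a readable pool name from token symbols (falling back to truncated addresses), plus fee and tick spacing where relevant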
complete_lps AS (
SELECT
block_number,
block_timestamp,
tx_hash,
p.contract_address,
pool_address,
CASE
WHEN pool_name IS NOT NULL THEN pool_name
WHEN pool_name IS NULL
AND platform IN (
'corex',
'glyph-v4',
'sushi-v3'
) THEN CONCAT(
COALESCE(
c0.token_symbol,
CONCAT(SUBSTRING(token0, 1, 5), '...', SUBSTRING(token0, 39, 42))
),
'-',
COALESCE(
c1.token_symbol,
CONCAT(SUBSTRING(token1, 1, 5), '...', SUBSTRING(token1, 39, 42))
),
' ',
COALESCE(
fee,
0
),
' ',
COALESCE(
tick_spacing,
0
),
CASE
WHEN platform = 'corex' THEN ' COREX LP'
WHEN platform = 'glyph-v4' THEN ' GLYPH-V4 LP'
WHEN platform = 'sushi-v3' THEN ' SUSHI-V3 LP'
END
)
WHEN pool_name IS NULL
AND platform IN (
'balancer',
'curve',
'bitflux'
) THEN CONCAT(
COALESCE(c0.token_symbol, SUBSTRING(token0, 1, 5) || '...' || SUBSTRING(token0, 39, 42)),
CASE
WHEN token1 IS NOT NULL THEN '-' || COALESCE(c1.token_symbol, SUBSTRING(token1, 1, 5) || '...' || SUBSTRING(token1, 39, 42))
ELSE ''
END,
CASE
WHEN token2 IS NOT NULL THEN '-' || COALESCE(c2.token_symbol, SUBSTRING(token2, 1, 5) || '...' || SUBSTRING(token2, 39, 42))
ELSE ''
END,
CASE
WHEN token3 IS NOT NULL THEN '-' || COALESCE(c3.token_symbol, SUBSTRING(token3, 1, 5) || '...' || SUBSTRING(token3, 39, 42))
ELSE ''
END,
CASE
WHEN token4 IS NOT NULL THEN '-' || COALESCE(c4.token_symbol, SUBSTRING(token4, 1, 5) || '...' || SUBSTRING(token4, 39, 42))
ELSE ''
END,
CASE
WHEN token5 IS NOT NULL THEN '-' || COALESCE(c5.token_symbol, SUBSTRING(token5, 1, 5) || '...' || SUBSTRING(token5, 39, 42))
ELSE ''
END,
CASE
WHEN token6 IS NOT NULL THEN '-' || COALESCE(c6.token_symbol, SUBSTRING(token6, 1, 5) || '...' || SUBSTRING(token6, 39, 42))
ELSE ''
END,
CASE
WHEN token7 IS NOT NULL THEN '-' || COALESCE(c7.token_symbol, SUBSTRING(token7, 1, 5) || '...' || SUBSTRING(token7, 39, 42))
ELSE ''
END
)
ELSE CONCAT(
COALESCE(
c0.token_symbol,
CONCAT(SUBSTRING(token0, 1, 5), '...', SUBSTRING(token0, 39, 42))
),
'-',
COALESCE(
c1.token_symbol,
CONCAT(SUBSTRING(token1, 1, 5), '...', SUBSTRING(token1, 39, 42))
)
)
END AS pool_name,
fee,
tick_spacing,
token0,
token1,
token2,
token3,
token4,
token5,
token6,
token7,
OBJECT_CONSTRUCT(
'token0',
token0,
'token1',
token1,
'token2',
token2,
'token3',
token3,
'token4',
token4,
'token5',
token5,
'token6',
token6,
'token7',
token7
) AS tokens,
OBJECT_CONSTRUCT(
'token0',
c0.token_symbol,
'token1',
c1.token_symbol,
'token2',
c2.token_symbol,
'token3',
c3.token_symbol,
'token4',
c4.token_symbol,
'token5',
c5.token_symbol,
'token6',
c6.token_symbol,
'token7',
c7.token_symbol
) AS symbols,
OBJECT_CONSTRUCT(
'token0',
c0.token_decimals,
'token1',
c1.token_decimals,
'token2',
c2.token_decimals,
'token3',
c3.token_decimals,
'token4',
c4.token_decimals,
'token5',
c5.token_decimals,
'token6',
c6.token_decimals,
'token7',
c7.token_decimals
) AS decimals,
platform,
version,
_id,
p._inserted_timestamp
FROM
all_pools p
LEFT JOIN contracts c0
ON c0.contract_address = p.token0
LEFT JOIN contracts c1
ON c1.contract_address = p.token1
LEFT JOIN contracts c2
ON c2.contract_address = p.token2
LEFT JOIN contracts c3
ON c3.contract_address = p.token3
LEFT JOIN contracts c4
ON c4.contract_address = p.token4
LEFT JOIN contracts c5
ON c5.contract_address = p.token5
LEFT JOIN contracts c6
ON c6.contract_address = p.token6
LEFT JOIN contracts c7
ON c7.contract_address = p.token7
),
{% if is_incremental() and var(
'HEAL_MODEL'
) %}
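-- Heal pass: rebuild pool_name, symbols, and decimals for previously loaded rows whose token metadata was missing at load time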
heal_model AS (
SELECT
block_number,
block_timestamp,
tx_hash,
t0.contract_address,
pool_address,
CASE
WHEN pool_name IS NOT NULL THEN pool_name
WHEN pool_name IS NULL
AND platform IN (
'corex',
'glyph-v4',
'sushi-v3'
) THEN CONCAT(
COALESCE(
c0.token_symbol,
CONCAT(SUBSTRING(token0, 1, 5), '...', SUBSTRING(token0, 39, 42))
),
'-',
COALESCE(
c1.token_symbol,
CONCAT(SUBSTRING(token1, 1, 5), '...', SUBSTRING(token1, 39, 42))
),
' ',
COALESCE(
fee,
0
),
' ',
COALESCE(
tick_spacing,
0
),
CASE
WHEN platform = 'corex' THEN ' COREX LP'
WHEN platform = 'glyph-v4' THEN ' GLYPH-V4 LP'
WHEN platform = 'sushi-v3' THEN ' SUSHI-V3 LP'
END
)
WHEN pool_name IS NULL
AND platform IN (
'balancer',
'curve',
'bitflux'
) THEN CONCAT(
COALESCE(c0.token_symbol, SUBSTRING(token0, 1, 5) || '...' || SUBSTRING(token0, 39, 42)),
CASE
WHEN token1 IS NOT NULL THEN '-' || COALESCE(c1.token_symbol, SUBSTRING(token1, 1, 5) || '...' || SUBSTRING(token1, 39, 42))
ELSE ''
END,
CASE
WHEN token2 IS NOT NULL THEN '-' || COALESCE(c2.token_symbol, SUBSTRING(token2, 1, 5) || '...' || SUBSTRING(token2, 39, 42))
ELSE ''
END,
CASE
WHEN token3 IS NOT NULL THEN '-' || COALESCE(c3.token_symbol, SUBSTRING(token3, 1, 5) || '...' || SUBSTRING(token3, 39, 42))
ELSE ''
END,
CASE
WHEN token4 IS NOT NULL THEN '-' || COALESCE(c4.token_symbol, SUBSTRING(token4, 1, 5) || '...' || SUBSTRING(token4, 39, 42))
ELSE ''
END,
CASE
WHEN token5 IS NOT NULL THEN '-' || COALESCE(c5.token_symbol, SUBSTRING(token5, 1, 5) || '...' || SUBSTRING(token5, 39, 42))
ELSE ''
END,
CASE
WHEN token6 IS NOT NULL THEN '-' || COALESCE(c6.token_symbol, SUBSTRING(token6, 1, 5) || '...' || SUBSTRING(token6, 39, 42))
ELSE ''
END,
CASE
WHEN token7 IS NOT NULL THEN '-' || COALESCE(c7.token_symbol, SUBSTRING(token7, 1, 5) || '...' || SUBSTRING(token7, 39, 42))
ELSE ''
END
)
ELSE CONCAT(
COALESCE(
c0.token_symbol,
CONCAT(SUBSTRING(token0, 1, 5), '...', SUBSTRING(token0, 39, 42))
),
'-',
COALESCE(
c1.token_symbol,
CONCAT(SUBSTRING(token1, 1, 5), '...', SUBSTRING(token1, 39, 42))
)
)
END AS pool_name_heal,
fee,
tick_spacing,
token0,
token1,
token2,
token3,
token4,
token5,
token6,
token7,
tokens,
OBJECT_CONSTRUCT(
'token0',
c0.token_symbol,
'token1',
c1.token_symbol,
'token2',
c2.token_symbol,
'token3',
c3.token_symbol,
'token4',
c4.token_symbol,
'token5',
c5.token_symbol,
'token6',
c6.token_symbol,
'token7',
c7.token_symbol
) AS symbols_heal,
OBJECT_CONSTRUCT(
'token0',
c0.token_decimals,
'token1',
c1.token_decimals,
'token2',
c2.token_decimals,
'token3',
c3.token_decimals,
'token4',
c4.token_decimals,
'token5',
c5.token_decimals,
'token6',
c6.token_decimals,
'token7',
c7.token_decimals
) AS decimals_heal,
platform,
version,
_id,
t0._inserted_timestamp
FROM
{{ this }}
t0
LEFT JOIN contracts c0
ON c0.contract_address = t0.token0
LEFT JOIN contracts c1
ON c1.contract_address = t0.token1
LEFT JOIN contracts c2
ON c2.contract_address = t0.token2
LEFT JOIN contracts c3
ON c3.contract_address = t0.token3
LEFT JOIN contracts c4
ON c4.contract_address = t0.token4
LEFT JOIN contracts c5
ON c5.contract_address = t0.token5
LEFT JOIN contracts c6
ON c6.contract_address = t0.token6
LEFT JOIN contracts c7
ON c7.contract_address = t0.token7
WHERE
CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t1.block_number,
'-',
t1.platform,
'-',
t1.version
)
FROM
{{ this }}
t1
WHERE
t1.decimals :token0 :: INT IS NULL
AND t1._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t1.tokens :token0 :: STRING)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t2.block_number,
'-',
t2.platform,
'-',
t2.version
)
FROM
{{ this }}
t2
WHERE
t2.decimals :token1 :: INT IS NULL
AND t2._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t2.tokens :token1 :: STRING)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t3.block_number,
'-',
t3.platform,
'-',
t3.version
)
FROM
{{ this }}
t3
WHERE
t3.decimals :token2 :: INT IS NULL
AND t3._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t3.tokens :token2 :: STRING)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t4.block_number,
'-',
t4.platform,
'-',
t4.version
)
FROM
{{ this }}
t4
WHERE
t4.decimals :token3 :: INT IS NULL
AND t4._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t4.tokens :token3 :: STRING)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t5.block_number,
'-',
t5.platform,
'-',
t5.version
)
FROM
{{ this }}
t5
WHERE
t5.decimals :token4 :: INT IS NULL
AND t5._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t5.tokens :token4 :: STRING)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t6.block_number,
'-',
t6.platform,
'-',
t6.version
)
FROM
{{ this }}
t6
WHERE
t6.decimals :token5 :: INT IS NULL
AND t6._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t6.tokens :token5 :: STRING)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t7.block_number,
'-',
t7.platform,
'-',
t7.version
)
FROM
{{ this }}
t7
WHERE
t7.decimals :token6 :: INT IS NULL
AND t7._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t7.tokens :token6 :: STRING)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t8.block_number,
'-',
t8.platform,
'-',
t8.version
)
FROM
{{ this }}
t8
WHERE
t8.decimals :token7 :: INT IS NULL
AND t8._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t8.tokens :token7 :: STRING)
GROUP BY
1
)
),
{% endif %}
FINAL AS (
SELECT
*
FROM
complete_lps
{% if is_incremental() and var(
'HEAL_MODEL'
) %}
UNION ALL
SELECT
block_number,
block_timestamp,
tx_hash,
contract_address,
pool_address,
pool_name_heal AS pool_name,
fee,
tick_spacing,
token0,
token1,
token2,
token3,
token4,
token5,
token6,
token7,
tokens,
symbols_heal AS symbols,
decimals_heal AS decimals,
platform,
version,
_id,
_inserted_timestamp
FROM
heal_model
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
platform,
version,
contract_address,
pool_address,
pool_name,
tokens,
symbols,
decimals,
fee,
tick_spacing,
token0,
token1,
token2,
token3,
token4,
token5,
token6,
token7,
_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['pool_address']
) }} AS complete_dex_liquidity_pools_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL


@ -1,651 +0,0 @@
-- depends_on: {{ ref('silver__complete_token_prices') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = ['block_number','platform','version'],
cluster_by = ['block_timestamp::DATE','platform'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(tx_hash, origin_function_signature, origin_from_address, origin_to_address, contract_address, pool_name, event_name, sender, tx_to, token_in, token_out, symbol_in, symbol_out), SUBSTRING(origin_function_signature, pool_name, event_name, sender, tx_to, token_in, token_out, symbol_in, symbol_out)",
tags = ['silver_dex','defi','dex','curated','heal']
) }}
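-- Union of the protocol-specific swap models, joined to token metadata and hourly prices to produce decimal-adjusted and USD amounts; an optional heal pass backfills rows loaded before metadata or prices were available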
WITH
bitflux AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
pool_address AS contract_address,
event_name,
amount_in_unadj,
amount_out_unadj,
token_in,
token_out,
sender,
tx_to,
event_index,
'bitflux' AS platform,
'bitflux' AS protocol,
'v1' AS version,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver_dex__bitflux_swaps') }}
{% if is_incremental() and 'bitflux' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
corex AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
pool_address AS contract_address,
'Swap' AS event_name,
CASE
WHEN amount0_unadj > 0 THEN ABS(amount0_unadj)
ELSE ABS(amount1_unadj)
END AS amount_in_unadj,
CASE
WHEN amount0_unadj < 0 THEN ABS(amount0_unadj)
ELSE ABS(amount1_unadj)
END AS amount_out_unadj,
CASE
WHEN amount0_unadj > 0 THEN token0
ELSE token1
END AS token_in,
CASE
WHEN amount0_unadj < 0 THEN token0
ELSE token1
END AS token_out,
sender,
recipient AS tx_to,
event_index,
'corex' AS platform,
'corex' AS protocol,
'v1' AS version,
_log_id,
_inserted_timestamp
FROM
{{ ref('silver_dex__corex_swaps') }}
{% if is_incremental() and 'corex' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
glyph_v4 AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
pool_address AS contract_address,
'Swap' AS event_name,
amount_in_unadj,
amount_out_unadj,
token_in,
token_out,
sender,
recipient AS tx_to,
event_index,
'glyph-v4' AS platform,
'glyph' AS protocol,
'v4' AS version,
_log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('silver_dex__glyph_v4_swaps') }}
{% if is_incremental() and 'glyph_v4' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
sushi_v3 AS (
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
pool_address AS contract_address,
'Swap' AS event_name,
amount_in_unadj,
amount_out_unadj,
token_in,
token_out,
sender,
recipient AS tx_to,
event_index,
'sushi-v3' AS platform,
'sushiswap' AS protocol,
'v3' AS version,
_log_id,
modified_timestamp AS _inserted_timestamp
FROM
{{ ref('silver_dex__sushi_v3_swaps') }}
{% if is_incremental() and 'sushi_v3' not in var('HEAL_MODELS') %}
WHERE
_inserted_timestamp >= (
SELECT
MAX(_inserted_timestamp) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
{% endif %}
),
all_dex AS (
SELECT
*
FROM
bitflux
UNION ALL
SELECT
*
FROM
corex
UNION ALL
SELECT
*
FROM
glyph_v4
UNION ALL
SELECT
*
FROM
sushi_v3
),
complete_dex_swaps AS (
SELECT
s.block_number,
s.block_timestamp,
s.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
s.contract_address,
event_name,
token_in,
p1.is_verified AS token_in_is_verified,
c1.token_decimals AS decimals_in,
c1.token_symbol AS symbol_in,
amount_in_unadj,
CASE
WHEN decimals_in IS NULL THEN amount_in_unadj
ELSE (amount_in_unadj / pow(10, decimals_in))
END AS amount_in,
CASE
WHEN decimals_in IS NOT NULL THEN amount_in * p1.price
ELSE NULL
END AS amount_in_usd,
token_out,
p2.is_verified AS token_out_is_verified,
c2.token_decimals AS decimals_out,
c2.token_symbol AS symbol_out,
amount_out_unadj,
CASE
WHEN decimals_out IS NULL THEN amount_out_unadj
ELSE (amount_out_unadj / pow(10, decimals_out))
END AS amount_out,
CASE
WHEN decimals_out IS NOT NULL THEN amount_out * p2.price
ELSE NULL
END AS amount_out_usd,
CASE
WHEN lp.pool_name IS NULL THEN CONCAT(
LEAST(
COALESCE(
symbol_in,
CONCAT(SUBSTRING(token_in, 1, 5), '...', SUBSTRING(token_in, 39, 42))
),
COALESCE(
symbol_out,
CONCAT(SUBSTRING(token_out, 1, 5), '...', SUBSTRING(token_out, 39, 42))
)
),
'-',
GREATEST(
COALESCE(
symbol_in,
CONCAT(SUBSTRING(token_in, 1, 5), '...', SUBSTRING(token_in, 39, 42))
),
COALESCE(
symbol_out,
CONCAT(SUBSTRING(token_out, 1, 5), '...', SUBSTRING(token_out, 39, 42))
)
)
)
ELSE lp.pool_name
END AS pool_name,
sender,
tx_to,
event_index,
s.platform,
s.protocol,
s.version,
s._log_id,
s._inserted_timestamp
FROM
all_dex s
LEFT JOIN {{ ref('silver__contracts') }}
c1
ON s.token_in = c1.contract_address
LEFT JOIN {{ ref('silver__contracts') }}
c2
ON s.token_out = c2.contract_address
LEFT JOIN {{ ref('price__ez_prices_hourly') }}
p1
ON s.token_in = p1.token_address
AND DATE_TRUNC(
'hour',
block_timestamp
) = p1.hour
LEFT JOIN {{ ref('price__ez_prices_hourly') }}
p2
ON s.token_out = p2.token_address
AND DATE_TRUNC(
'hour',
block_timestamp
) = p2.hour
LEFT JOIN {{ ref('silver_dex__complete_dex_liquidity_pools') }}
lp
ON s.contract_address = lp.pool_address
),
{% if is_incremental() and var(
'HEAL_MODEL'
) %}
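-- Heal pass: recompute decimal-adjusted amounts, USD values, and pool names for rows where decimals or prices were missing when first loaded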
heal_model AS (
SELECT
t0.block_number,
t0.block_timestamp,
t0.tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
t0.contract_address,
event_name,
token_in,
p1.is_verified AS token_in_is_verified,
c1.token_decimals AS decimals_in,
c1.token_symbol AS symbol_in,
amount_in_unadj,
CASE
WHEN c1.token_decimals IS NULL THEN amount_in_unadj
ELSE (amount_in_unadj / pow(10, c1.token_decimals))
END AS amount_in_heal,
CASE
WHEN c1.token_decimals IS NOT NULL THEN amount_in_heal * p1.price
ELSE NULL
END AS amount_in_usd_heal,
token_out,
p2.is_verified AS token_out_is_verified,
c2.token_decimals AS decimals_out,
c2.token_symbol AS symbol_out,
amount_out_unadj,
CASE
WHEN c2.token_decimals IS NULL THEN amount_out_unadj
ELSE (amount_out_unadj / pow(10, c2.token_decimals))
END AS amount_out_heal,
CASE
WHEN c2.token_decimals IS NOT NULL THEN amount_out_heal * p2.price
ELSE NULL
END AS amount_out_usd_heal,
CASE
WHEN lp.pool_name IS NULL THEN CONCAT(
LEAST(
COALESCE(
c1.token_symbol,
CONCAT(SUBSTRING(token_in, 1, 5), '...', SUBSTRING(token_in, 39, 42))
),
COALESCE(
c2.token_symbol,
CONCAT(SUBSTRING(token_out, 1, 5), '...', SUBSTRING(token_out, 39, 42))
)
),
'-',
GREATEST(
COALESCE(
c1.token_symbol,
CONCAT(SUBSTRING(token_in, 1, 5), '...', SUBSTRING(token_in, 39, 42))
),
COALESCE(
c2.token_symbol,
CONCAT(SUBSTRING(token_out, 1, 5), '...', SUBSTRING(token_out, 39, 42))
)
)
)
ELSE lp.pool_name
END AS pool_name_heal,
sender,
tx_to,
event_index,
t0.platform,
t0.protocol,
t0.version,
t0._log_id,
t0._inserted_timestamp
FROM
{{ this }}
t0
LEFT JOIN {{ ref('silver__contracts') }}
c1
ON t0.token_in = c1.contract_address
LEFT JOIN {{ ref('silver__contracts') }}
c2
ON t0.token_out = c2.contract_address
LEFT JOIN {{ ref('price__ez_prices_hourly') }}
p1
ON t0.token_in = p1.token_address
AND DATE_TRUNC(
'hour',
block_timestamp
) = p1.hour
LEFT JOIN {{ ref('price__ez_prices_hourly') }}
p2
ON t0.token_out = p2.token_address
AND DATE_TRUNC(
'hour',
block_timestamp
) = p2.hour
LEFT JOIN {{ ref('silver_dex__complete_dex_liquidity_pools') }}
lp
ON t0.contract_address = lp.pool_address
WHERE
CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t1.block_number,
'-',
t1.platform,
'-',
t1.version
)
FROM
{{ this }}
t1
WHERE
t1.decimals_in IS NULL
AND t1._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t1.token_in)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t2.block_number,
'-',
t2.platform,
'-',
t2.version
)
FROM
{{ this }}
t2
WHERE
t2.decimals_out IS NULL
AND t2._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__contracts') }} C
WHERE
C._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND C.token_decimals IS NOT NULL
AND C.contract_address = t2.token_out)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t3.block_number,
'-',
t3.platform,
'-',
t3.version
)
FROM
{{ this }}
t3
WHERE
t3.amount_in_usd IS NULL
AND t3._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__complete_token_prices') }}
p
WHERE
p._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND p.price IS NOT NULL
AND p.token_address = t3.token_in
AND p.hour = DATE_TRUNC(
'hour',
t3.block_timestamp
)
)
GROUP BY
1
)
OR CONCAT(
t0.block_number,
'-',
t0.platform,
'-',
t0.version
) IN (
SELECT
CONCAT(
t4.block_number,
'-',
t4.platform,
'-',
t4.version
)
FROM
{{ this }}
t4
WHERE
t4.amount_out_usd IS NULL
AND t4._inserted_timestamp < (
SELECT
MAX(
_inserted_timestamp
) - INTERVAL '{{ var("LOOKBACK", "4 hours") }}'
FROM
{{ this }}
)
AND EXISTS (
SELECT
1
FROM
{{ ref('silver__complete_token_prices') }}
p
WHERE
p._inserted_timestamp > DATEADD('DAY', -14, SYSDATE())
AND p.price IS NOT NULL
AND p.token_address = t4.token_out
AND p.hour = DATE_TRUNC(
'hour',
t4.block_timestamp
)
)
GROUP BY
1
)
),
{% endif %}
FINAL AS (
SELECT
*
FROM
complete_dex_swaps
{% if is_incremental() and var(
'HEAL_MODEL'
) %}
UNION ALL
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
event_name,
token_in,
token_in_is_verified,
decimals_in,
symbol_in,
amount_in_unadj,
amount_in_heal AS amount_in,
amount_in_usd_heal AS amount_in_usd,
token_out,
token_out_is_verified,
decimals_out,
symbol_out,
amount_out_unadj,
amount_out_heal AS amount_out,
amount_out_usd_heal AS amount_out_usd,
pool_name_heal AS pool_name,
sender,
tx_to,
event_index,
platform,
protocol,
version,
_log_id,
_inserted_timestamp
FROM
heal_model
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
pool_name,
event_name,
amount_in_unadj,
amount_in,
amount_in_usd,
amount_out_unadj,
amount_out,
amount_out_usd,
sender,
tx_to,
event_index,
platform,
protocol,
version,
token_in,
IFNULL(token_in_is_verified, FALSE) AS token_in_is_verified,
token_out,
IFNULL(token_out_is_verified, FALSE) AS token_out_is_verified,
symbol_in,
symbol_out,
decimals_in,
decimals_out,
_log_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['tx_hash','event_index']
) }} AS complete_dex_swaps_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
FINAL qualify (ROW_NUMBER() over (PARTITION BY _log_id
ORDER BY
_inserted_timestamp DESC)) = 1


@ -1,109 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'pool_address',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH created_pools AS (
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
LOWER(CONCAT('0x', SUBSTR(topics [1] :: STRING, 27, 40))) AS token0,
LOWER(CONCAT('0x', SUBSTR(topics [2] :: STRING, 27, 40))) AS token1,
utils.udf_hex_to_int(
's2c',
topics [3] :: STRING
) :: INTEGER AS fee,
utils.udf_hex_to_int(
's2c',
segmented_data [0] :: STRING
) :: INTEGER AS tick_spacing,
CONCAT('0x', SUBSTR(segmented_data [1] :: STRING, 25, 40)) AS pool_address,
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
modified_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
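-- PoolCreated(token0, token1, fee, tickSpacing, pool) event signature from the Uniswap V3-style factory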
topic_0 = '0x783cca1c0412dd0d695e784568c96da2e9c22ff989357a2e8b1d9b2b4e6b7118'
AND contract_address = '0xc35dadb65012ec5796536bd9864ed8773abc74c4' --Sushi/UniswapV3Factory
AND tx_succeeded
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(modified_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
),
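-- Initialize event on the new pool; init_price_1_0_unadj = 1.0001 ^ init_tick is the unadjusted token1-per-token0 price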
initial_info AS (
SELECT
contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
utils.udf_hex_to_int('s2c', CONCAT('0x', segmented_data [0] :: STRING)) :: FLOAT AS init_sqrtPriceX96,
utils.udf_hex_to_int('s2c', CONCAT('0x', segmented_data [1] :: STRING)) :: FLOAT AS init_tick,
pow(
1.0001,
init_tick
) AS init_price_1_0_unadj,
CONCAT(
tx_hash,
'-',
event_index
) AS _log_id,
modified_timestamp
FROM
{{ ref('core__fact_event_logs') }}
WHERE
topic_0 = '0x98636036cb66a9c19a37435efc1e90142190214e8abeb821bdba3f2990dd4c95'
AND tx_succeeded
{% if is_incremental() %}
AND modified_timestamp >= (
SELECT
MAX(modified_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
p.contract_address,
token0,
token1,
fee,
(
fee / 10000
) :: FLOAT AS fee_percent,
tick_spacing,
pool_address,
COALESCE(
init_tick,
0
) AS init_tick,
p._log_id,
p.modified_timestamp
FROM
created_pools p
LEFT JOIN initial_info i
ON p.pool_address = i.contract_address qualify(ROW_NUMBER() over(PARTITION BY pool_address
ORDER BY
p.modified_timestamp DESC)) = 1


@ -1,23 +0,0 @@
version: 2
models:
- name: silver_dex__sushi_v3_pools
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- POOL_ADDRESS
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- name: POOL_ADDRESS
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null


@ -1,117 +0,0 @@
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = 'block_number',
cluster_by = ['block_timestamp::DATE'],
tags = ['silver_dex','defi','dex','curated']
) }}
WITH swaps_base AS (
SELECT
l.block_number,
l.block_timestamp,
l.tx_hash,
l.event_index,
l.origin_function_signature,
l.origin_from_address,
l.origin_to_address,
l.contract_address,
regexp_substr_all(SUBSTR(DATA, 3, len(DATA)), '.{64}') AS segmented_data,
CONCAT('0x', SUBSTR(topic_1, 27, 40)) AS sender,
CONCAT('0x', SUBSTR(topic_2, 27, 40)) AS recipient,
utils.udf_hex_to_int(
's2c',
segmented_data [0] :: STRING
) :: FLOAT AS amount0_unadj,
utils.udf_hex_to_int(
's2c',
segmented_data [1] :: STRING
) :: FLOAT AS amount1_unadj,
utils.udf_hex_to_int(
's2c',
segmented_data [2] :: STRING
) :: FLOAT AS sqrtPriceX96,
utils.udf_hex_to_int(
's2c',
segmented_data [3] :: STRING
) :: FLOAT AS liquidity,
utils.udf_hex_to_int(
's2c',
segmented_data [4] :: STRING
) :: FLOAT AS tick,
token0,
token1,
pool_address,
tick_spacing,
fee,
CONCAT(
l.tx_hash,
'-',
l.event_index
) AS _log_id,
l.modified_timestamp
FROM
{{ ref('core__fact_event_logs') }}
l
INNER JOIN {{ ref('silver_dex__sushi_v3_pools') }}
p
ON p.pool_address = l.contract_address
WHERE
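-- Uniswap V3-style Swap event signature; the date floor is assumed to approximate the SushiSwap V3 deployment on this chain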
l.block_timestamp :: DATE >= '2023-04-01'
AND topic_0 = '0xc42079f94a6350d7e6235f29174924f928cc2ac818eb64fed8004e115fbcca67'
AND tx_succeeded
{% if is_incremental() %}
AND l.modified_timestamp >= (
SELECT
MAX(modified_timestamp) - INTERVAL '12 hours'
FROM
{{ this }}
)
AND l.modified_timestamp >= SYSDATE() - INTERVAL '7 day'
{% endif %}
)
SELECT
block_number,
block_timestamp,
tx_hash,
event_index,
origin_function_signature,
origin_from_address,
origin_to_address,
contract_address,
pool_address,
recipient,
recipient AS tx_to,
sender,
fee,
tick,
tick_spacing,
liquidity,
token0,
token1,
amount0_unadj,
amount1_unadj,
CASE
WHEN amount0_unadj > 0 THEN ABS(amount0_unadj)
ELSE ABS(amount1_unadj)
END AS amount_in_unadj,
CASE
WHEN amount0_unadj < 0 THEN ABS(amount0_unadj)
ELSE ABS(amount1_unadj)
END AS amount_out_unadj,
CASE
WHEN amount0_unadj > 0 THEN token0
ELSE token1
END AS token_in,
CASE
WHEN amount0_unadj < 0 THEN token0
ELSE token1
END AS token_out,
_log_id,
modified_timestamp
FROM
swaps_base qualify(ROW_NUMBER() over(PARTITION BY _log_id
ORDER BY
modified_timestamp DESC)) = 1


@ -1,43 +0,0 @@
version: 2
models:
- name: silver_dex__sushi_v3_swaps
tests:
- dbt_utils.unique_combination_of_columns:
combination_of_columns:
- _LOG_ID
columns:
- name: BLOCK_NUMBER
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- NUMBER
- FLOAT
- name: BLOCK_TIMESTAMP
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_LTZ
- TIMESTAMP_NTZ
- name: POOL_ADDRESS
tests:
- not_null
- name: RECIPIENT
tests:
- not_null
- name: SENDER
tests:
- not_null
- name: TOKEN0
tests:
- not_null
- name: TOKEN1
tests:
- not_null
- name: AMOUNT0_UNADJ
tests:
- not_null
- name: AMOUNT1_UNADJ
tests:
- not_null


@ -1,3 +1,3 @@
packages:
- git: https://github.com/FlipsideCrypto/fsc-evm.git
revision: v4.7.2
revision: AN-6265/consolidate