Add DEX swaps v2 model; update logs, actions events, and FunctionCall models

This commit is contained in:
Jack Forgash 2024-01-23 12:40:25 -07:00
parent ce8c0a71cd
commit 7bdf566465
19 changed files with 304 additions and 419 deletions

View File

@ -70,3 +70,4 @@ dispatch:
query-comment:
comment: '{{ dbt_snowflake_query_tags.get_query_comment(node) }}'
append: true # Snowflake removes prefixed comments.

View File

@ -0,0 +1,5 @@
{% docs amount_in_raw %}
Amount in, in a swap or transfer, in raw (non-decimal-adjusted) form.
{% enddocs %}

View File

@ -0,0 +1,5 @@
{% docs amount_out_raw %}
Amount out, in a swap or transfer, in raw (non-decimal-adjusted) form.
{% enddocs %}

View File

@ -0,0 +1,6 @@
{% docs swap_input_data %}
JSON of input data for the swap, taken from the FunctionCall JSON.
Note, directional keys such as in and out are determined by the pool.
{% enddocs %}

View File

@ -23,7 +23,7 @@ SELECT
COALESCE(
logs_id,
{{ dbt_utils.generate_surrogate_key(
['action_id']
['log_id']
) }}
) AS fact_logs_id,
COALESCE(inserted_timestamp, _inserted_timestamp, '2000-01-01' :: TIMESTAMP_NTZ) AS inserted_timestamp,

View File

@ -14,15 +14,13 @@ WITH action_events AS (
FROM
{{ ref('silver__actions_events_s3') }}
WHERE
action_name = 'FunctionCall'
{% if var("MANUAL_FIX") %}
action_name = 'FunctionCall' {% if var("MANUAL_FIX") %}
AND {{ partition_load_manual('no_buffer') }}
{% else %}
AND {{ incremental_load_filter('_inserted_timestamp') }}
{% endif %}
),
decoding AS (
FINAL AS (
SELECT
action_id,
tx_hash,
@ -31,44 +29,27 @@ decoding AS (
block_id,
block_timestamp,
action_name,
action_data :args AS args,
COALESCE(TRY_PARSE_JSON(TRY_BASE64_DECODE_STRING(args)), args) AS args_decoded,
action_data :method_name :: STRING AS method_name,
COALESCE(
TRY_PARSE_JSON(TRY_BASE64_DECODE_STRING(action_data :args)),
action_data :args
) AS args,
action_data :deposit :: NUMBER AS deposit,
action_data :gas :: NUMBER AS attached_gas,
action_data :method_name :: STRING AS method_name,
_load_timestamp,
logs,
_partition_by_block_number,
_inserted_timestamp
_inserted_timestamp,
_load_timestamp,
{{ dbt_utils.generate_surrogate_key(
['action_id']
) }} AS actions_events_function_call_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
action_events
),
function_calls AS (
SELECT
action_id,
tx_hash,
receiver_id,
signer_id,
block_id,
block_timestamp,
action_name,
method_name,
args_decoded AS args,
deposit,
attached_gas,
_load_timestamp,
_partition_by_block_number,
_inserted_timestamp
FROM
decoding
)
SELECT
*,
{{ dbt_utils.generate_surrogate_key(
['action_id']
) }} AS actions_events_function_call_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
*
FROM
function_calls
FINAL

View File

@ -45,20 +45,12 @@ models:
- name: ATTACHED_GAS
description: "{{ doc('attached_gas')}}"
- name: LOGS
description: "{{ doc('logs')}}"
- name: _PARTITION_BY_BLOCK_NUMBER
description: "{{ doc('_partition_by_block_number')}}"
- name: _LOAD_TIMESTAMP
description: "{{ doc('_load_timestamp')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- name: _INSERTED_TIMESTAMP
description: "{{ doc('_inserted_timestamp')}}"

View File

@ -31,9 +31,13 @@ flatten_actions AS (
block_id,
block_timestamp,
chunk_hash,
_load_timestamp,
logs,
_partition_by_block_number,
_inserted_timestamp,
COALESCE(
_inserted_timestamp,
_load_timestamp
) AS _inserted_timestamp,
_load_timestamp,
receipt_actions,
execution_outcome,
VALUE AS action_object,
@ -46,20 +50,31 @@ flatten_actions AS (
),
FINAL AS (
SELECT
tx_hash,
receipt_object_id,
concat_ws(
'-',
receipt_object_id,
action_index
) AS action_id,
receiver_id,
signer_id,
chunk_hash,
tx_hash,
receipt_object_id,
block_id,
block_timestamp,
chunk_hash,
_load_timestamp,
_partition_by_block_number,
_inserted_timestamp,
this,
action_index,
key AS action_name,
TRY_PARSE_JSON(VALUE) AS action_data,
action_index
logs,
_partition_by_block_number,
_inserted_timestamp,
_load_timestamp,
{{ dbt_utils.generate_surrogate_key(
['receipt_object_id', 'action_index']
) }} AS actions_events_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
flatten_actions,
LATERAL FLATTEN(
@ -67,29 +82,6 @@ FINAL AS (
)
)
SELECT
concat_ws(
'-',
receipt_object_id,
action_index
) AS action_id,
receiver_id,
signer_id,
chunk_hash,
tx_hash,
receipt_object_id,
block_id,
block_timestamp,
action_index,
action_name,
action_data,
_load_timestamp,
_partition_by_block_number,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['receipt_object_id', 'action_index']
) }} AS actions_events_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
*
FROM
FINAL

View File

@ -52,20 +52,12 @@ models:
- name: ACTION_DATA
description: "{{ doc('action_data')}}"
- name: LOGS
description: "{{ doc('logs')}}"
- name: _PARTITION_BY_BLOCK_NUMBER
description: "{{ doc('_partition_by_block_number')}}"
- name: _LOAD_TIMESTAMP
description: "{{ doc('_load_timestamp')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- name: _INSERTED_TIMESTAMP
description: "{{ doc('_inserted_timestamp')}}"

View File

@ -6,7 +6,7 @@
cluster_by = ["block_timestamp::DATE"],
tags = ['curated']
) }}
{# DEPRECATED JANUARY 2024 #}
WITH base_swap_calls AS (
SELECT

View File

@ -3,7 +3,8 @@ version: 2
models:
- name: silver__dex_swaps_s3
description: |-
This table records all the swap transactions occurring in NEAR.
        This table records all the swap transactions occurring in NEAR. This model is deprecated as of January 2024 because its logic is outdated and inaccurate; it will remain live through February 2024 so users can migrate to the new model.
columns:
- name: BLOCK_ID

View File

@ -0,0 +1,119 @@
{{ config(
    materialized = 'incremental',
    incremental_strategy = 'merge',
    merge_exclude_columns = ["inserted_timestamp"],
    unique_key = 'dex_swaps_v2_id',
    tags = ['curated'],
) }}
-- Parses swap details out of "Swapped <amt> <token> for <amt> <token>" log lines.
-- Logs are used instead of FunctionCall args because only the log output carries
-- the realized amounts for each hop of a multi-hop swap (see model yml).
WITH swap_logs AS (
    -- Successful receipts whose cleaned log is a swap execution line.
    SELECT
        *
    FROM
        {{ ref('silver__logs_s3') }}
    WHERE
        receipt_succeeded
        AND clean_log LIKE 'Swapped%'
),
receipts AS (
    -- Originating receipts, so each swap can be tied back to the FunctionCall
    -- input args (receipt_actions) and the receipt-level receiver / signer.
    SELECT
        receipt_object_id,
        receipt_actions,
        receiver_id,
        signer_id
    FROM
        {{ ref('silver__streamline_receipts_final') }}
    WHERE
        receipt_object_id IN (
            SELECT
                receipt_object_id
            FROM
                swap_logs
        )
),
swap_outcome AS (
    SELECT
        tx_hash,
        receipt_object_id,
        block_id,
        block_timestamp,
        receiver_id,
        signer_id,
        -- 0-based position of this swap within its receipt; used below to index
        -- into the FunctionCall msg actions array.
        ROW_NUMBER() over (
            PARTITION BY receipt_object_id
            ORDER BY
                log_index ASC
        ) - 1 AS swap_index,
        clean_log AS LOG,
        -- Log shape: "Swapped <amt_1> <token_1> for <amt_2> <token_2>[, total fee ...]".
        -- NOTE(review): <amt_1>/<token_1> are labeled *_out and <amt_2>/<token_2>
        -- *_in here, which is inverted relative to the contract's own args
        -- (args.token_in matches <token_1>) — confirm the intended direction.
        REGEXP_REPLACE(
            LOG,
            '.*Swapped (\\d+) (.*) for (\\d+) (.*)',
            '\\1'
        ) :: INT AS amount_out_raw,
        REGEXP_REPLACE(
            LOG,
            '.*Swapped \\d+ (\\S+) for (\\d+) (.*)',
            '\\1'
        ) :: STRING AS token_out,
        REGEXP_REPLACE(
            LOG,
            '.*Swapped \\d+ \\S+ for (\\d+) (.*)',
            '\\1'
        ) :: INT AS amount_in_raw,
        REGEXP_REPLACE(
            LOG,
            '.*Swapped \\d+ \\S+ for \\d+ (.*)',
            '\\1'
        ) :: STRING AS token_in,
        _partition_by_block_number,
        _inserted_timestamp,
        -- One row per swap log line, so key on receipt + log position.
        {{ dbt_utils.generate_surrogate_key(
            ['receipt_object_id', 'log_index']
        ) }} AS dex_swaps_v2_id
    FROM
        swap_logs
),
FINAL AS (
    SELECT
        tx_hash,
        o.receipt_object_id,
        block_id,
        block_timestamp,
        -- Qualified with o: both swap_outcome and receipts expose these columns,
        -- and an unqualified reference is ambiguous in Snowflake.
        o.receiver_id,
        o.signer_id,
        swap_index,
        LOG,
        amount_out_raw,
        token_out,
        amount_in_raw,
        token_in,
        ARRAY_SIZE(
            receipt_actions :receipt :Action :actions
        ) AS num_actions,
        -- Decode the base64 FunctionCall args, then the nested msg JSON, and pick
        -- out this swap's entry from the actions array.
        TRY_PARSE_JSON(
            TRY_PARSE_JSON(
                TRY_BASE64_DECODE_STRING(
                    receipt_actions :receipt :Action :actions [0] :FunctionCall :args
                )
            ) :msg
        ) :actions [swap_index] AS swap_input_data,
        r.receiver_id AS receipt_receiver_id,
        r.signer_id AS receipt_signer_id,
        _partition_by_block_number,
        _inserted_timestamp,
        dex_swaps_v2_id,
        -- Audit columns are generated here (not passed through from logs) so they
        -- reflect THIS model's runs, matching the other curated models and the
        -- merge_exclude_columns = ["inserted_timestamp"] contract above.
        SYSDATE() AS inserted_timestamp,
        SYSDATE() AS modified_timestamp,
        '{{ invocation_id }}' AS _invocation_id
    FROM
        swap_outcome o
        LEFT JOIN receipts r USING (receipt_object_id)
)
SELECT
    *
FROM
    FINAL

View File

@ -0,0 +1,89 @@
# Schema and tests for silver__dex_swaps_v2 (DEX swaps parsed from log output).
version: 2
models:
  - name: silver__dex_swaps_v2
    description: |-
      Parses log output data for swap information. It was determined logs must be used over inputs in a FunctionCall as only the output contains actual swap information. See tx AfvgkUxP8taJNBLaZYvFumFrrePpJujb2gjQJz7YbRiM as an example.
    columns:
      - name: TX_HASH
        description: "{{ doc('tx_hash')}}"
        tests:
          - not_null:
              where: _inserted_timestamp <= CURRENT_TIMESTAMP - interval '1 hour'
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - STRING
                - VARCHAR
      - name: RECEIPT_OBJECT_ID
        description: "{{ doc('receipt_object_id')}}"
        tests:
          - not_null
      - name: BLOCK_ID
        description: "{{ doc('block_id')}}"
      - name: BLOCK_TIMESTAMP
        description: "{{ doc('block_timestamp')}}"
        tests:
          - not_null:
              where: _inserted_timestamp <= CURRENT_TIMESTAMP - interval '1 hour'
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - TIMESTAMP_NTZ
      - name: RECEIVER_ID
        description: "{{ doc('receiver_id')}}"
      - name: SIGNER_ID
        description: "{{ doc('signer_id')}}"
      - name: SWAP_INDEX
        description: "{{ doc('index')}}"
      - name: LOG
        description: "{{ doc('clean_log')}}"
      - name: AMOUNT_OUT_RAW
        description: "{{ doc('amount_out_raw')}}"
        tests:
          - not_null
      - name: TOKEN_OUT
        description: "{{ doc('token_out')}}"
        tests:
          - not_null
      - name: AMOUNT_IN_RAW
        description: "{{ doc('amount_in_raw')}}"
        tests:
          - not_null
      - name: TOKEN_IN
        description: "{{ doc('token_in')}}"
        tests:
          - not_null
      - name: SWAP_INPUT_DATA
        description: "{{ doc('swap_input_data')}}"
      - name: _PARTITION_BY_BLOCK_NUMBER
        description: "{{doc('_partition_by_block_number')}}"
      - name: _INSERTED_TIMESTAMP
        description: "{{doc('_inserted_timestamp')}}"
      - name: DEX_SWAPS_V2_ID
        description: "{{doc('id')}}"
        tests:
          - unique
      - name: INSERTED_TIMESTAMP
        description: "{{doc('inserted_timestamp')}}"
      - name: MODIFIED_TIMESTAMP
        description: "{{doc('modified_timestamp')}}"
      - name: _INVOCATION_ID
        description: "{{doc('invocation_id')}}"

View File

@ -1,85 +0,0 @@
{{ config(
materialized = 'incremental',
merge_exclude_columns = ["inserted_timestamp"],
unique_key = 'ref_swaps_id',
tags = ['curated'],
) }}
WITH ref_finance AS (
SELECT
*
FROM
{{ ref('silver__streamline_receipts_final') }}
WHERE
receiver_id IN (
'ref-finance.near',
'v2.ref-finance.near'
)
AND block_timestamp >= CURRENT_DATE - INTERVAL '90 days'
AND receipt_succeeded
),
flatten_actions AS (
SELECT
tx_hash,
block_id,
block_timestamp,
receipt_object_id,
receiver_id,
signer_id,
INDEX AS action_index,
logs,
-- VALUE,
VALUE :FunctionCall :method_name :: STRING AS method_name,
TRY_PARSE_JSON(
TRY_BASE64_DECODE_STRING(
VALUE :FunctionCall :args
)
) AS args,
receipt_succeeded,
_partition_by_block_number,
COALESCE(
_inserted_timestamp,
_load_timestamp
) AS _inserted_timestamp
FROM
ref_finance,
LATERAL FLATTEN (
receipt_actions :receipt :Action :actions
)
),
flatten_function_call AS (
SELECT
tx_hash,
block_id,
block_timestamp,
receipt_object_id,
receiver_id,
signer_id,
action_index,
logs,
method_name,
VALUE,
VALUE :amount_in :: INT AS amount_in,
VALUE :min_amount_out :: INT AS min_amount_out,
VALUE :token_in :: STRING AS token_in,
VALUE :token_out :: STRING AS token_out,
VALUE :pool_id :: STRING AS pool_id,
-- TODO check if always int then change dtype
INDEX AS swap_index
FROM
flatten_actions,
LATERAL FLATTEN(
args :actions
)
)
SELECT
*,
{{ dbt_utils.generate_surrogate_key(
['receipt_object_id', 'action_index', 'swap_index']
) }} AS ref_swaps_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
flatten_function_call

View File

@ -1,193 +0,0 @@
select * from near.silver.streamline_receipts_final
where block_id >= 110575625
and tx_hash in
(
'GP9Q2T5Y5b6RgFHuWjYTjpYdzMLb6L8fYW9mDHWcjGMH', -- neko to wNEAR
'Aix5SgyxvrR64oXCVo3iQuEWu8iqUXPyoXUVuzKhn2RD', -- neko to NEAR
'7kZP9UPYFbGFC9zBhyrUtit1AL4KtMWYtrG62LJnDexY', -- NEAR to REF
'GHS42kw1eVBARdcQWXEz4fhGMS7vuSyrqgYnBZ8kJULC' -- REF to NEAR
)
order by tx_hash, block_id;
select * from near.silver.streamline_receipts_final
where block_id >= 107000000
and tx_hash = 'AGkmKnRkVEdN2kF79HFeX41wso5jkgJHnxNNXfundde1'
order by tx_hash, block_id;
select distinct receiver_id from near.silver.streamline_receipts_final
where receiver_id ilike '%ref-finance.near';
-- 1token.v2.ref-finance.near
-- v2ref-finance.near
-- token.v2.ref-finance.near
-- v2.ref-finance.near
-- ref-finance.near
-- token.ref-finance.near
-- tokenv2ref-finance.near
-- xtoken.ref-finance.near
select * from near.silver.streamline_receipts_final
where receiver_id = 'ref-finance.near'
limit 15;
select
receiver_id,
count(1),
any_value(clean_log)
from near.silver.logs_s3
where clean_log like 'Swap%'
group by 1;
with
swap_receiver_ids as (
select *from near.silver.logs_s3
where clean_log like 'Swapped%'
),
actions as (
select * from near.silver.actions_events_function_call_s3
where split(action_id, '-')[0]::STRING in (select * from swap_receiver_ids)
)
select * from actions limit 5;
-- the inputs are wildly different, based on each contract
-- but they all follow basically the same log output...
with
swap_logs as (
select * from near.silver.logs_s3
where clean_log like 'Swapped%'
)
select
tx_hash,
receipt_object_id,
block_id,
block_timestamp,
receiver_id,
signer_id,
clean_log as log,
REGEXP_REPLACE(log, '.*Swapped (\\d+) (.*) for (\\d+) (.*)', '\\1')::int AS amount_out_raw,
REGEXP_REPLACE(log, '.*Swapped \\d+ (\\S+) for (\\d+) (.*)', '\\1')::string AS token_out,
REGEXP_REPLACE(log, '.*Swapped \\d+ \\S+ for (\\d+) (.*)', '\\1')::int AS amount_in_raw,
REGEXP_REPLACE(log, '.*Swapped \\d+ \\S+ for \\d+ (.*)', '\\1')::string AS token_in,
coalesce(_inserted_timestamp, _load_timestamp) as _inserted_timestamp,
_partition_by_block_number,
logs_id,
inserted_timestamp,
modified_timestamp,
_invocation_id
from swap_logs
where log ilike '%with admin fee%'
limit 15;
-- mapping just ref
with
ref_finance as (
select *,
array_size(receipt_actions:receipt:Action:actions::array) as action_ct
from near.silver.streamline_receipts_final
where receiver_id in ('ref-finance.near', 'v2.ref-finance.near')
and block_timestamp >= current_date - interval '7 days'
and receipt_succeeded
),
flatten_actions as (
select
tx_hash,
block_id,
block_timestamp,
receipt_object_id,
receiver_id,
signer_id,
action_ct,
INDEX as action_index,
logs,
-- VALUE,
VALUE:FunctionCall:method_name::string as method_name,
try_parse_json(try_base64_decode_string(VALUE:FunctionCall:args)) as args,
receipt_succeeded
from ref_finance, lateral flatten (receipt_actions:receipt:Action:actions)
where true
-- and method_name = 'swap'
-- and tx_hash = '5RBZcAPHgE87qBqMZosNYQzTy3zUMSmHDpXywdFUbuEh'
),
flatten_function_call as (
select
tx_hash,
block_id,
block_timestamp,
receipt_object_id,
receiver_id,
signer_id,
action_ct,
action_index,
logs,
method_name,
VALUE,
VALUE:amount_in::int as amount_in,
VALUE:min_amount_out::int as min_amount_out,
VALUE:token_in::string as token_in,
VALUE:token_out::string as token_out,
VALUE:pool_id::string as pool_id, -- TODO check if always int then change dtype
INDEX as swap_index
from flatten_actions, lateral flatten(args:actions)
)
select * from flatten_function_call
limit 50;
select * from near_dev.silver.ref_swaps
where tx_hash = 'AfvgkUxP8taJNBLaZYvFumFrrePpJujb2gjQJz7YbRiM'
limit 50;
-- {
-- "actions": [
-- {
-- "amount_in": "1228685760498046875000000",
-- "min_amount_out": "0",
-- "pool_id": 4,
-- "token_in": "wrap.near",
-- "token_out": "dac17f958d2ee523a2206206994597c13d831ec7.factory.bridge.near"
-- },
-- {
-- "min_amount_out": "0",
-- "pool_id": 1910,
-- "token_in": "dac17f958d2ee523a2206206994597c13d831ec7.factory.bridge.near",
-- "token_out": "a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48.factory.bridge.near"
-- },
-- {
-- "min_amount_out": "0",
-- "pool_id": 3024,
-- "token_in": "a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48.factory.bridge.near",
-- "token_out": "marmaj.tkn.near"
-- },
-- {
-- "min_amount_out": "0",
-- "pool_id": 3042,
-- "token_in": "marmaj.tkn.near",
-- "token_out": "aaaaaa20d9e0e2461697782ef11675f668207961.factory.bridge.near"
-- },
-- {
-- "min_amount_out": "1228685760498046875000000",
-- "pool_id": 1395,
-- "token_in": "aaaaaa20d9e0e2461697782ef11675f668207961.factory.bridge.near",
-- "token_out": "wrap.near"
-- }
-- ]
-- }
-- vs logs below. So, the input data does not have the intermediate token amounts...
-- nearblocks must be parsing the log, then. Or retrieving the amounts from elsewhere, but likely logs
-- Swapped 1228685760498046875000000 wrap.near for 1370018 dac17f958d2ee523a2206206994597c13d831ec7.factory.bridge.near
-- Exchange v2.ref-finance.near got 4287440486291960659 shares, No referral fee
-- Swapped 1370018 dac17f958d2ee523a2206206994597c13d831ec7.factory.bridge.near for 1369532 a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48.factory.bridge.near, total fee 685, admin fee 137
-- Exchange v2.ref-finance.near got 135820392861051 shares, No referral fee
-- Swapped 1369532 a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48.factory.bridge.near for 1135700395033097621 marmaj.tkn.near
-- Exchange v2.ref-finance.near got 77847931402681928990 shares, No referral fee
-- Swapped 1135700395033097621 marmaj.tkn.near for 26433575129761361463 aaaaaa20d9e0e2461697782ef11675f668207961.factory.bridge.near
-- Exchange v2.ref-finance.near got 999982423502654312902 shares, No referral fee
-- Swapped 26433575129761361463 aaaaaa20d9e0e2461697782ef11675f668207961.factory.bridge.near for 1233967190597137433407275 wrap.near
-- Exchange v2.ref-finance.near got 57194534600681424 shares, No referral fee

View File

@ -50,7 +50,7 @@ function_call AS (
),
standard_logs AS (
SELECT
action_id AS logs_id,
logs_id,
concat_ws(
'-',
receipt_object_id,

View File

@ -2,7 +2,7 @@
materialized = "incremental",
merge_exclude_columns = ["inserted_timestamp"],
cluster_by = ["_inserted_timestamp::DATE","block_timestamp::DATE"],
unique_key = "action_id",
unique_key = "log_id",
incremental_strategy = "merge",
tags = ['curated']
) }}
@ -24,20 +24,34 @@ WITH receipts AS (
),
FINAL AS (
SELECT
tx_hash,
receipt_object_id,
block_id,
block_timestamp,
tx_hash,
receipt_object_id,
concat_ws(
'-',
receipt_object_id,
INDEX
) AS log_id,
INDEX AS log_index,
receiver_id,
signer_id,
_load_timestamp,
_partition_by_block_number,
_inserted_timestamp,
COALESCE(TRY_PARSE_JSON(VALUE), TRY_PARSE_JSON(SPLIT(VALUE, 'EVENT_JSON:') [1]), VALUE :: STRING) AS clean_log,
VALUE ILIKE 'event_json:%' AS is_standard,
gas_burnt,
receipt_succeeded,
INDEX AS action_index,
COALESCE(TRY_PARSE_JSON(VALUE), TRY_PARSE_JSON(SPLIT(VALUE, 'EVENT_JSON:') [1]), VALUE :: STRING) AS clean_log,
VALUE ILIKE 'event_json:%' AS is_standard
_partition_by_block_number,
COALESCE(
_inserted_timestamp,
_load_timestamp
) AS _inserted_timestamp,
_load_timestamp,
{{ dbt_utils.generate_surrogate_key(
['log_id']
) }} AS logs_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
receipts,
LATERAL FLATTEN(
@ -45,29 +59,6 @@ FINAL AS (
)
)
SELECT
concat_ws(
'-',
receipt_object_id,
action_index
) AS action_id,
receiver_id,
signer_id,
clean_log,
is_standard,
tx_hash,
receipt_object_id,
block_id,
gas_burnt,
block_timestamp,
receipt_succeeded,
_load_timestamp,
_partition_by_block_number,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['action_id']
) }} AS logs_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
*
FROM
FINAL

View File

@ -42,16 +42,6 @@ models:
- name: _PARTITION_BY_BLOCK_NUMBER
description: "{{ doc('_partition_by_block_number')}}"
- name: _LOAD_TIMESTAMP
description: "{{ doc('_load_timestamp')}}"
tests:
- not_null
- dbt_expectations.expect_column_values_to_be_in_type_list:
column_type_list:
- TIMESTAMP_NTZ
- dbt_expectations.expect_row_values_to_have_recent_data:
datepart: day
interval: 1
- name: CLEAN_LOG
description: "{{ doc('clean_log')}}"

View File

@ -13,7 +13,6 @@ WITH action_events AS(
tx_hash,
action_id,
action_data :deposit :: INT AS deposit,
_load_timestamp,
_partition_by_block_number,
_inserted_timestamp
FROM