add contract events and general cleanup

Eric Laurello 2025-02-20 11:23:29 -05:00
parent 4d53ca391f
commit 754b1445e5
14 changed files with 284 additions and 10 deletions

View File

@@ -98,15 +98,15 @@ vars:
- INTERNAL_DEV
prod:
API_INTEGRATION:
EXTERNAL_FUNCTION_URI:
API_INTEGRATION: aws_stellar_api_prod_v2
EXTERNAL_FUNCTION_URI: qavdasgp43.execute-api.us-east-1.amazonaws.com/prod/
ROLES:
- AWS_LAMBDA_STELLAR_API
- INTERNAL_DEV
- DBT_CLOUD_STELLAR
prod-2xl:
API_INTEGRATION:
EXTERNAL_FUNCTION_URI:
API_INTEGRATION: aws_stellar_api_prod_v2
EXTERNAL_FUNCTION_URI: qavdasgp43.execute-api.us-east-1.amazonaws.com/prod/
ROLES:
- AWS_LAMBDA_STELLAR_API
- INTERNAL_DEV
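
These environment-scoped vars typically feed the DDL that (re)creates the Snowflake external function used by Streamline. A minimal sketch of that DDL, assuming a hypothetical function name, signature, and resource path; the integration name and host are the prod values above:

CREATE OR REPLACE EXTERNAL FUNCTION streamline.udf_bulk_rest_api(json_payload VARIANT)
RETURNS VARIANT
API_INTEGRATION = aws_stellar_api_prod_v2
AS 'https://qavdasgp43.execute-api.us-east-1.amazonaws.com/prod/udf_bulk_rest_api';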

View File

@@ -0,0 +1,10 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_query_v2(
model = "history_contract_events",
partition_function = "TRY_TO_DATE(left(split_part(split_part(file_name, '=', -1), '/', -1),8), 'YYYYMMDD')",
partition_name = "partition_gte_id",
unique_key = "transaction_hash",
other_cols = "partition_id"
) }}
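
The partition_function above derives a partition date from the external stage file path. A quick sketch of how it evaluates, using a hypothetical file name for illustration:

SELECT TRY_TO_DATE(
    LEFT(SPLIT_PART(SPLIT_PART('partition_gte_id=20250218/20250218-1200-contract_events.json', '=', -1), '/', -1), 8),
    'YYYYMMDD'
) AS partition_date; -- returns 2025-02-18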

View File

@@ -0,0 +1,10 @@
{{ config (
materialized = 'view'
) }}
{{ streamline_external_table_FR_query_v2(
model = "history_contract_events",
partition_function = "TRY_TO_DATE(left(split_part(split_part(file_name, '=', -1), '/', -1),8), 'YYYYMMDD')",
partition_name = "partition_gte_id",
unique_key = "transaction_hash",
other_cols = "partition_id"
) }}
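
This _FR ("full refresh") variant is only read when the downstream silver model is rebuilt from scratch; incremental runs read bronze__contract_events instead (see the is_incremental() branch in silver__contract_events below). For example, a full rebuild would typically be invoked with:

dbt run -s silver__contract_events --full-refresh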

View File

@@ -24,6 +24,7 @@ There is more information on how to use dbt docs in the last section of this doc
**Fact Tables:**
- [core.fact_accounts](https://flipsidecrypto.github.io/stellar-models/#!/model/model.stellar_models.core__fact_accounts)
- [core.fact_contract_events](https://flipsidecrypto.github.io/stellar-models/#!/model/model.stellar_models.core__fact_contract_events)
- [core.fact_ledgers](https://flipsidecrypto.github.io/stellar-models/#!/model/model.stellar_models.core__fact_ledgers)
- [core.fact_operations](https://flipsidecrypto.github.io/stellar-models/#!/model/model.stellar_models.core__fact_operations)
- [core.fact_transactions](https://flipsidecrypto.github.io/stellar-models/#!/model/model.stellar_models.core__fact_transactions)

View File

@@ -0,0 +1,4 @@
{% docs in_successful_contract_call %}
A boolean value indicating whether the event occurred within a successful contract call.
{% enddocs %}

View File

@@ -0,0 +1,43 @@
-- depends_on: {{ ref('silver__contract_events') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = ["transaction_hash","closed_at::DATE"],
cluster_by = ['block_timestamp::DATE','closed_at::DATE'],
post_hook = "ALTER TABLE {{ this }} ADD SEARCH OPTIMIZATION ON EQUALITY(transaction_id,transaction_hash,contract_id,topics_decoded,data_decoded);",
tags = ['scheduled_core']
) }}
SELECT
transaction_hash,
transaction_id,
closed_at,
closed_at AS block_timestamp,
ledger_sequence,
SUCCESSFUL,
in_successful_contract_call,
contract_id,
TYPE,
type_string,
topics,
topics_decoded,
DATA,
data_decoded,
contract_event_xdr,
{{ dbt_utils.generate_surrogate_key(
['transaction_hash','_event_order']
) }} AS fact_contract_events_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp
FROM
{{ ref('silver__contract_events') }}
{% if is_incremental() %}
WHERE
modified_timestamp >= (
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
)
{% endif %}
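
For reference, a hypothetical downstream query that benefits from the equality search optimization added in the post_hook above; the database/schema qualification, contract id, and date range are illustrative:

SELECT closed_at, transaction_hash, topics_decoded, data_decoded
FROM stellar.core.fact_contract_events
WHERE contract_id = 'CCEXAMPLECONTRACTID'
    AND closed_at::DATE >= '2025-02-01'
ORDER BY closed_at;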

View File

@@ -0,0 +1,75 @@
version: 2
models:
  - name: core__fact_contract_events
    description: Fact table containing contract event details from the Stellar network.
    columns:
      - name: TRANSACTION_HASH
        description: '{{ doc("transaction_hash") }}'
        tests:
          - not_null:
              where: modified_timestamp > current_date - {{ var('test_days_threshold', 3) }}
      - name: TRANSACTION_ID
        description: '{{ doc("transaction_id") }}'
      - name: CLOSED_AT
        description: '{{ doc("closed_at") }}'
        tests:
          - not_null:
              where: modified_timestamp > current_date - {{ var('test_days_threshold', 3) }}
      - name: BLOCK_TIMESTAMP
        description: '{{ doc("block_timestamp") }}'
      - name: LEDGER_SEQUENCE
        description: '{{ doc("ledger_sequence") }}'
        tests:
          - not_null:
              where: modified_timestamp > current_date - {{ var('test_days_threshold', 3) }}
      - name: SUCCESSFUL
        description: '{{ doc("successful") }}'
        tests:
          - not_null:
              where: modified_timestamp > current_date - {{ var('test_days_threshold', 3) }}
      - name: IN_SUCCESSFUL_CONTRACT_CALL
        description: '{{ doc("in_successful_contract_call") }}'
      - name: CONTRACT_ID
        description: '{{ doc("contract_id") }}'
      - name: TYPE
        description: 'The contract event type id'
      - name: TYPE_STRING
        description: 'The contract event type value'
      - name: TOPICS
        description: 'The raw event topics'
      - name: TOPICS_DECODED
        description: "The decoded event topics"
      - name: DATA
        description: 'The raw event data'
      - name: DATA_DECODED
        description: 'The decoded event data'
      - name: contract_event_xdr
        description: 'The raw contract event xdr'
      - name: fact_contract_events_id
        description: '{{ doc("pk") }}'
      - name: INSERTED_TIMESTAMP
        description: '{{ doc("inserted_timestamp") }}'
      - name: MODIFIED_TIMESTAMP
        description: '{{ doc("modified_timestamp") }}'
      - name: _INVOCATION_ID
        description: '{{ doc("invocation_id") }}'
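
The where: config above scopes each test to recently modified rows. With the default test_days_threshold of 3, dbt compiles the not_null test on TRANSACTION_HASH to roughly the following; the relation name and exact SQL vary by deployment and dbt version:

SELECT transaction_hash
FROM (
    SELECT *
    FROM core.fact_contract_events
    WHERE modified_timestamp > CURRENT_DATE - 3
) dbt_subquery
WHERE transaction_hash IS NULL;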

View File

@@ -142,7 +142,6 @@ SELECT
trustline_account_id,
trustor_muxed,
trustor_muxed_id,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(['id']) }} AS fact_operations_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp

View File

@@ -55,7 +55,6 @@ SELECT
rent_fee_charged,
tx_signers,
refundable_fee,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['id']
) }} AS fact_transactions_id,

View File

@@ -34,7 +34,6 @@ SELECT
batch_run_date,
batch_insert_ts,
ledger_sequence,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['liquidity_pool_id', 'closed_at']
) }} AS fact_liquidity_pools_id,

View File

@@ -38,7 +38,6 @@ SELECT
trade_type,
rounding_slippage,
seller_is_exact,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['history_operation_id','trade_order']
) }} AS fact_trades_id,

View File

@@ -176,8 +176,7 @@ WITH operations AS (
operation_result_code,
operation_trace_code,
details_json,
modified_timestamp,
_inserted_timestamp
modified_timestamp
FROM
{{ ref('core__fact_operations') }}

View File

@@ -0,0 +1,123 @@
-- depends_on: {{ ref('bronze__contract_events') }}
{{ config(
materialized = 'incremental',
incremental_strategy = 'delete+insert',
unique_key = ["transaction_hash","closed_at::DATE"],
cluster_by = ['closed_at::DATE','partition_id','modified_timestamp::DATE'],
tags = ['scheduled_core'],
) }}
{% if execute %}
{% if is_incremental() %}
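{# look up the current high-watermarks so the incremental bronze scan below can be pruned by partition and load timestamp #}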
{% set max_is_query %}
SELECT
MAX(_inserted_timestamp) AS _inserted_timestamp,
MAX(partition_gte_id) AS partition_gte_id
FROM
{{ this }}
{% endset %}
{% set result = run_query(max_is_query) %}
{% set max_is = result [0] [0] %}
{% set max_part = result [0] [1] %}
{% endif %}
{% endif %}
WITH pre_final AS (
SELECT
partition_id,
partition_gte_id,
VALUE :transaction_hash :: STRING AS transaction_hash,
VALUE :transaction_id :: INTEGER AS transaction_id,
TO_TIMESTAMP(
VALUE :closed_at :: INT,
6
) AS closed_at,
VALUE :ledger_sequence :: INTEGER AS ledger_sequence,
VALUE :"successful" :: BOOLEAN AS SUCCESSFUL,
VALUE :in_successful_contract_call :: BOOLEAN AS in_successful_contract_call,
VALUE :contract_id :: STRING AS contract_id,
VALUE :type :: INTEGER AS TYPE,
VALUE :type_string :: STRING AS type_string,
TRY_PARSE_JSON(
VALUE :topics
) AS topics,
TRY_PARSE_JSON(
VALUE :topics_decoded
) AS topics_decoded,
TRY_PARSE_JSON(
VALUE :data
) AS DATA,
TRY_PARSE_JSON(
VALUE :data_decoded
) AS data_decoded,
VALUE :contract_event_xdr :: STRING AS contract_event_xdr,
VALUE :batch_id :: STRING AS batch_id,
TO_TIMESTAMP(
VALUE :batch_run_date :: INT,
6
) AS batch_run_date,
TO_TIMESTAMP(
VALUE :batch_insert_ts :: INT,
6
) AS batch_insert_ts,
_inserted_timestamp
FROM
{% if is_incremental() %}
{{ ref('bronze__contract_events') }}
{% else %}
{{ ref('bronze__contract_events_FR') }}
{% endif %}
{% if is_incremental() %}
WHERE
partition_gte_id >= '{{ max_part }}'
AND _inserted_timestamp > '{{ max_is }}'
{% endif %}
{# intentionally RANK() rather than ROW_NUMBER(), since the table has no true PK and ties must all be kept #}
qualify RANK() over (
PARTITION BY transaction_hash
ORDER BY
batch_insert_ts DESC,
_inserted_timestamp DESC
) = 1
)
SELECT
partition_id,
partition_gte_id,
transaction_hash,
transaction_id,
closed_at,
ledger_sequence,
SUCCESSFUL,
in_successful_contract_call,
contract_id,
TYPE,
type_string,
topics,
topics_decoded,
DATA,
data_decoded,
contract_event_xdr,
batch_id,
batch_run_date,
batch_insert_ts,
-- this is not execution order, just an arbitrary ranking used to construct a PK
ROW_NUMBER() over (
PARTITION BY transaction_hash
ORDER BY
SYSDATE()
) AS _event_order,
_inserted_timestamp,
{{ dbt_utils.generate_surrogate_key(
['transaction_hash','_event_order']
) }} AS contract_events_id,
SYSDATE() AS inserted_timestamp,
SYSDATE() AS modified_timestamp,
'{{ invocation_id }}' AS _invocation_id
FROM
pre_final
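
On an incremental run, the max_part and max_is values captured above are interpolated into the source filter as string literals, so the WHERE clause in pre_final compiles to something like the following (watermark values are hypothetical):

WHERE partition_gte_id >= '20250218'
    AND _inserted_timestamp > '2025-02-19 14:05:11.000'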

View File

@@ -0,0 +1,13 @@
version: 2
models:
  - name: silver__contract_events
    columns:
      - name: transaction_hash
        description: "{{ doc('transaction_hash') }}"
        tests:
          - not_null:
              where: modified_timestamp > current_date - {{ var('test_days_threshold', 3) }}
          - dbt_expectations.expect_column_values_to_be_in_type_list:
              column_type_list:
                - STRING
                - VARCHAR