Merge branch 'main' of github.com:FlipsideCrypto/kaia-models into AN-5337/kaia-phase-2-changes

mattromano 2025-03-12 08:56:12 -07:00
commit fb7d480486
4 changed files with 106 additions and 34 deletions

View File

@@ -0,0 +1,50 @@
name: dbt_run_trace_backfill
run-name: dbt_run_trace_backfill
on:
workflow_dispatch:
schedule:
# Runs "At Minute 50 every hour" (see https://crontab.guru)
- cron: '50 * * * *'
env:
DBT_PROFILES_DIR: ./
ACCOUNT: "${{ vars.ACCOUNT }}"
ROLE: "${{ vars.ROLE }}"
USER: "${{ vars.USER }}"
PASSWORD: "${{ secrets.PASSWORD }}"
REGION: "${{ vars.REGION }}"
DATABASE: "${{ vars.DATABASE }}"
WAREHOUSE: "${{ vars.WAREHOUSE }}"
SCHEMA: "${{ vars.SCHEMA }}"
concurrency:
group: ${{ github.workflow }}
jobs:
run_dbt_jobs:
runs-on: ubuntu-latest
environment:
name: workflow_prod_2xl
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- name: install dependencies
run: |
pip install -r requirements.txt
dbt deps
- name: Run DBT Jobs
run: |
dbt run --threads 1 --vars '{"full_reload_mode": true}' -m silver__traces2 silver__fact_traces2
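
The step above re-runs the two trace models with the backfill flag set. Inside the models, the flag is read with var() and a false default, so scheduled runs that omit --vars stay on the normal incremental path. A minimal sketch of the consuming pattern (branch bodies illustrative):

{% if is_incremental() and var('full_reload_mode', false) %}
    -- backfill branch: walk an older block range in batches
{% elif is_incremental() %}
    -- normal branch: advance from the stored modified_timestamp watermark
{% endif %}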

View File

@@ -47,7 +47,16 @@ row_nos AS (
batched AS ({% for item in range(150) %}
SELECT
rn.contract_address,
live.udf_api(concat('https://api-cypress.klaytnscope.com/v2/accounts/',contract_address)) as abi_data,
live.udf_api(
'GET',
CONCAT('https://mainnet-oapi.kaiascan.io/api?module=contract&action=getabi&address=', rn.contract_address, '&apikey={key}'),
OBJECT_CONSTRUCT(
'Content-Type', 'application/json',
'fsc-quantum-state', 'livequery'
),
NULL,
'Vault/prod/block_explorers/kaia_scan'
) as abi_data,
SYSDATE() AS _inserted_timestamp
FROM
row_nos rn
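
The replacement call uses livequery's five-argument form: HTTP method, request URL, a headers object, a request body (NULL for a GET), and a Vault secret path whose value fills the {key} token in the URL at call time. The same call as a standalone sketch, against a single hypothetical address:

SELECT
    live.udf_api(
        'GET',
        CONCAT(
            'https://mainnet-oapi.kaiascan.io/api?module=contract&action=getabi',
            '&address=', '0x0000000000000000000000000000000000000000', -- hypothetical address
            '&apikey={key}' -- placeholder resolved from the Vault secret
        ),
        OBJECT_CONSTRUCT(
            'Content-Type', 'application/json',
            'fsc-quantum-state', 'livequery'
        ),
        NULL, -- no request body on a GET
        'Vault/prod/block_explorers/kaia_scan' -- secret path supplying {key}
    ) AS abi_data;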

View File

@@ -23,27 +23,29 @@ WITH silver_traces AS (
{{ ref('silver__traces2') }}
WHERE
1 = 1
AND block_number > 160000000
{% if is_incremental() and not full_reload_mode %}
{% if is_incremental() and not var('full_reload_mode', false) %}
AND block_number > 160000000
AND modified_timestamp > (
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
WHERE
block_number > 160000000
)
{% elif is_incremental() and full_reload_mode %}
AND block_number BETWEEN (
SELECT
MAX(block_number)
FROM
{{ this }}
)
AND (
SELECT
MAX(block_number) + 5000000
FROM
{{ this }}
{% elif is_incremental() and var('full_reload_mode', false) %}
AND block_number < 80000000
AND modified_timestamp > COALESCE(
(
SELECT
MAX(modified_timestamp)
FROM
{{ this }}
WHERE
block_number < 80000000
),
'2024-01-01'
)
{% else %}
AND block_number <= 149500000
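
The backfill branch now walks the pre-80M history and guards its watermark with COALESCE: on the first backfill pass {{ this }} contains no rows below block 80,000,000, so MAX(modified_timestamp) is NULL and the '2024-01-01' floor keeps the predicate true for every source row. The guarded-watermark pattern in isolation (table and column names illustrative):

SELECT *
FROM source_rows
WHERE modified_timestamp > COALESCE(
    (
        SELECT MAX(modified_timestamp) -- NULL until the first backfill batch lands
        FROM target_table
        WHERE block_number < 80000000
    ),
    '2024-01-01' -- floor that admits all rows on the first pass
);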

View File

@@ -9,6 +9,7 @@
) }}
WITH bronze_traces AS (
SELECT
block_number,
partition_key,
@@ -16,8 +17,7 @@ WITH bronze_traces AS (
DATA :result AS full_traces,
_inserted_timestamp
FROM
{% if is_incremental() and not full_reload_mode %}
{% if is_incremental() and not var('full_reload_mode', false) %}
{{ ref('bronze__streamline_traces') }}
WHERE
_inserted_timestamp >= (
@@ -27,31 +27,42 @@ WITH bronze_traces AS (
{{ this }}
)
AND DATA :result IS NOT NULL
and block_number > 160000000
and partition_key > 160000000
{% elif is_incremental() and full_reload_mode %}
{% elif is_incremental() and var('full_reload_mode', false) and not var('initial_load', false) %}
{{ ref('bronze__streamline_fr_traces') }}
WHERE
partition_key BETWEEN (
SELECT
MAX(partition_key) - 100000
FROM
{{ this }}
)
AND (
SELECT
MAX(partition_key) + 5000000
FROM
{{ this }}
)
DATA :result IS NOT NULL
AND partition_key BETWEEN (
SELECT
ROUND(MAX(block_number),-3)
FROM
{{ this }}
WHERE
block_number < 80000000
) - 100000
AND (
SELECT
ROUND(MAX(block_number),-3)
FROM
{{ this }}
WHERE
block_number < 80000000
) + 2000000
{% elif var('initial_load', false) %}
{{ ref('bronze__streamline_fr_traces') }}
WHERE
DATA :result IS NOT NULL
AND block_number BETWEEN 0 AND 5000000
and partition_key < 5500000
{% else %}
{{ ref('bronze__streamline_fr_traces') }}
WHERE partition_key <= 149500000
WHERE block_number <= 149500000
{% endif %}
and block_number > 160000000
and partition_key > 160000000
qualify(ROW_NUMBER() over (PARTITION BY block_number, tx_position
ORDER BY
_inserted_timestamp DESC)) = 1
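
The closing QUALIFY applies to every branch: for each (block_number, tx_position) key it keeps only the most recently ingested row, so traces re-fetched during a backfill replace earlier copies instead of duplicating them. The keep-latest pattern in isolation (table name illustrative):

SELECT *
FROM raw_traces
QUALIFY ROW_NUMBER() OVER (
    PARTITION BY block_number, tx_position -- one row per trace key
    ORDER BY _inserted_timestamp DESC -- newest ingest wins
) = 1;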